From 058be9697ae248f5978ec73f2f12e308d23dcff9 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 18 Nov 2024 10:29:54 +0100 Subject: [PATCH 001/386] [Gradle] Fix PublishPlugin configuration cache compatibility (#116887) Relying on GenerateMavenPom#getPom breaks configuration cache compatibility as this is a transient property that is not serialized. --- .../internal/conventions/PublishPlugin.java | 54 +++++++++++-------- 1 file changed, 33 insertions(+), 21 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java index d19a1d492d9ed..c3124812e5089 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/PublishPlugin.java @@ -17,11 +17,9 @@ import org.elasticsearch.gradle.internal.conventions.info.GitInfo; import org.elasticsearch.gradle.internal.conventions.precommit.PomValidationPrecommitPlugin; import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.gradle.api.Action; import org.gradle.api.NamedDomainObjectSet; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.Task; import org.gradle.api.XmlProvider; import org.gradle.api.file.ProjectLayout; import org.gradle.api.plugins.BasePlugin; @@ -69,6 +67,7 @@ public void apply(Project project) { configureSourcesJar(project); configurePomGeneration(project); configurePublications(project); + formatGeneratedPom(project); } private void configurePublications(Project project) { @@ -127,42 +126,27 @@ private void configurePomGeneration(Project project) { projectVersion.get() ) ); - pomTask.doFirst(t -> pomTask.getPom().withXml(xml -> formatDependencies(xml))); }); var publishing = extensions.getByType(PublishingExtension.class); final var mavenPublications = publishing.getPublications().withType(MavenPublication.class); - addNameAndDescriptionToPom(project, mavenPublications); mavenPublications.configureEach(publication -> { - // Add git origin info to generated POM files for internal builds - publication.getPom().withXml(xml -> addScmInfo(xml, gitInfo.get())); + publication.getPom().withXml(xml -> { + // Add git origin info to generated POM files for internal builds + addScmInfo(xml, gitInfo.get()); + }); // have to defer this until archivesBaseName is set project.afterEvaluate(p -> publication.setArtifactId(archivesBaseName.get())); generatePomTask.configure(t -> t.dependsOn(generateMavenPoms)); }); } - /** - * just ensure we put dependencies to the end. more a cosmetic thing than anything else - * */ - private void formatDependencies(XmlProvider xml) { - Element rootElement = xml.asElement(); - var dependencies = rootElement.getElementsByTagName("dependencies"); - if (dependencies.getLength() == 1 && dependencies.item(0) != null) { - org.w3c.dom.Node item = dependencies.item(0); - rootElement.removeChild(item); - rootElement.appendChild(item); - } - } - private void addNameAndDescriptionToPom(Project project, NamedDomainObjectSet mavenPublications) { var name = project.getName(); var description = providerFactory.provider(() -> project.getDescription() != null ? 
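// falls back to an empty string so the generated POM always carries a <description> element (assumed intent)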
project.getDescription() : ""); mavenPublications.configureEach(p -> p.getPom().withXml(xml -> { var root = xml.asNode(); - // Node versionNode = root.get("version"); - // versionNode.plus(1, "name", name); root.appendNode("name", name); root.appendNode("description", description.get()); })); @@ -209,4 +193,32 @@ static void configureSourcesJar(Project project) { project.getTasks().named(BasePlugin.ASSEMBLE_TASK_NAME).configure(t -> t.dependsOn(sourcesJarTask)); }); } + + + /** + * Format the generated pom files to be in a sort of reproducible order. + */ + private void formatGeneratedPom(Project project) { + var publishing = project.getExtensions().getByType(PublishingExtension.class); + final var mavenPublications = publishing.getPublications().withType(MavenPublication.class); + mavenPublications.configureEach(publication -> { + publication.getPom().withXml(xml -> { + // Add some pom formatting + formatDependencies(xml); + }); + }); + } + + /** + * just ensure we put dependencies to the end. more a cosmetic thing than anything else + * */ + private void formatDependencies(XmlProvider xml) { + Element rootElement = xml.asElement(); + var dependencies = rootElement.getElementsByTagName("dependencies"); + if (dependencies.getLength() == 1 && dependencies.item(0) != null) { + org.w3c.dom.Node item = dependencies.item(0); + rootElement.removeChild(item); + rootElement.appendChild(item); + } + } } From 929d39820a657b423911ccfe9f974cffac5a9e34 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 18 Nov 2024 10:43:53 +0100 Subject: [PATCH 002/386] [Gradle] Fix build finished hooks on ci when using configuration cache (#116888) Fixes two incompatibilities with Gradle configuration cache in our build scan build finished hook: referencing static methods from build script referencing gradle object from closure --- .../groovy/elasticsearch.build-scan.gradle | 10 +++++----- .../gradle/internal/util/CiUtils.java | 18 ++++++++++++++++++ 2 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 2748411750a5a..847eda7a355c0 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -12,10 +12,14 @@ import java.time.LocalDateTime; import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS +import static org.elasticsearch.gradle.internal.util.CiUtils.safeName import java.lang.management.ManagementFactory import java.time.LocalDateTime +// Resolving this early to avoid issues with the build scan plugin in combination with the configuration cache usage +def taskNames = gradle.startParameter.taskNames.join(' ') + develocity { buildScan { @@ -110,7 +114,7 @@ develocity { // Add a build annotation // See: https://buildkite.com/docs/agent/v3/cli-annotate - def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${gradle.startParameter.taskNames.join(' ')}
""" + def body = """
${System.getenv('BUILDKITE_LABEL')} :gradle: ${result.failures ? 'failed' : 'successful'} build: gradle ${taskNames}
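<!-- taskNames above is resolved once at configuration time (top of this script); reading gradle.startParameter inside this closure would not be configuration-cache safe -->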
""" def process = [ 'buildkite-agent', 'annotate', @@ -131,7 +135,3 @@ develocity { } } } - -static def safeName(String string) { - return string.replaceAll(/[^a-zA-Z0-9_\-\.]+/, ' ').trim().replaceAll(' ', '_').toLowerCase() -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java new file mode 100644 index 0000000000000..1b019a6cbd3e6 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/CiUtils.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.util; + +public class CiUtils { + + static String safeName(String input) { + return input.replaceAll("[^a-zA-Z0-9_\\-\\.]+", " ").trim().replaceAll(" ", "_").toLowerCase(); + } + +} From 4e17c61d39c64f2522d8ed6245c9c43d41a0dad0 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 18 Nov 2024 11:34:28 +0100 Subject: [PATCH 003/386] [DOCS] Remove 'rescore' from retriever.asciidoc (#116921) --- docs/reference/search/retriever.asciidoc | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 0da75ac30d2dd..86a81f1d155d2 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -704,5 +704,3 @@ Instead they are only allowed as elements of specific retrievers: * <> * <> * <> -* <> - From 595230ece086ed402d1a9ed539e9ff800b584158 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Mon, 18 Nov 2024 13:14:29 +0100 Subject: [PATCH 004/386] Always check if index mode is logsdb (#116922) --- docs/changelog/116922.yaml | 5 +++++ .../elasticsearch/cluster/routing/IndexRouting.java | 12 ++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/116922.yaml diff --git a/docs/changelog/116922.yaml b/docs/changelog/116922.yaml new file mode 100644 index 0000000000000..39e63da50ea24 --- /dev/null +++ b/docs/changelog/116922.yaml @@ -0,0 +1,5 @@ +pr: 116922 +summary: Always check if index mode is logsdb +area: Logs +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index aa92f395b20d2..be0e3429a2ce4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -167,8 +167,7 @@ public void process(IndexRequest indexRequest) { // generate id if not already provided final String id = indexRequest.id(); if (id == null) { - if (creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) - || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID) && indexMode == IndexMode.LOGSDB) { + if (shouldUseTimeBasedId(indexMode, creationVersion)) { 
indexRequest.autoGenerateTimeBasedId(); } else { indexRequest.autoGenerateId(); @@ -178,6 +177,15 @@ public void process(IndexRequest indexRequest) { } } + private static boolean shouldUseTimeBasedId(final IndexMode indexMode, final IndexVersion creationVersion) { + return indexMode == IndexMode.LOGSDB && isNewIndexVersion(creationVersion); + } + + private static boolean isNewIndexVersion(final IndexVersion creationVersion) { + return creationVersion.between(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + || creationVersion.onOrAfter(IndexVersions.TIME_BASED_K_ORDERED_DOC_ID); + } + @Override public int indexShard( String id, From 366fa749b06d30e76344921a15b347447cda35c9 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 18 Nov 2024 13:17:22 +0100 Subject: [PATCH 005/386] Fix handling of time exceeded exception in fetch phase (#116676) The fetch phase is subject to timeouts like any other search phase. Timeouts may happen when low level cancellation is enabled (true by default), hence the directory reader is wrapped into ExitableDirectoryReader and a timeout is provided to the search request. The exception that is used is TimeExceededException, but it is an internal exception that should never be returned to the user. When that is thrown, we need to catch it and throw error or mark the response as timed out depending on whether partial results are allowed or not. --- docs/changelog/116676.yaml | 5 + .../search/fetch/FetchPhase.java | 11 +- .../search/fetch/FetchPhaseDocsIterator.java | 66 +++++-- .../action/search/FetchSearchPhaseTests.java | 185 ++++++++++++++++++ .../fetch/FetchPhaseDocsIteratorTests.java | 4 +- .../aggregations/AggregatorTestCase.java | 17 +- 6 files changed, 268 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/116676.yaml diff --git a/docs/changelog/116676.yaml b/docs/changelog/116676.yaml new file mode 100644 index 0000000000000..8c6671e177499 --- /dev/null +++ b/docs/changelog/116676.yaml @@ -0,0 +1,5 @@ +pr: 116676 +summary: Fix handling of time exceeded exception in fetch phase +area: Search +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 6bc667d4359b1..546586a9ff3c3 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -191,7 +191,16 @@ protected SearchHit nextDoc(int doc) throws IOException { } }; - SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad); + SearchHit[] hits = docsIterator.iterate( + context.shardTarget(), + context.searcher().getIndexReader(), + docIdsToLoad, + context.request().allowPartialSearchResults() + ); + + if (docsIterator.isTimedOut()) { + context.queryResult().searchTimedOut(true); + } if (context.isCancelled()) { for (SearchHit hit : hits) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index 682ee4b375668..df4e7649ffd3b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -13,7 +13,10 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.elasticsearch.search.SearchHit; 
+import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.query.SearchTimeoutException; import java.io.IOException; import java.util.Arrays; @@ -27,6 +30,12 @@ */ abstract class FetchPhaseDocsIterator { + private boolean timedOut = false; + + public boolean isTimedOut() { + return timedOut; + } + /** * Called when a new leaf reader is reached * @param ctx the leaf reader for this set of doc ids @@ -44,7 +53,7 @@ abstract class FetchPhaseDocsIterator { /** * Iterate over a set of docsIds within a particular shard and index reader */ - public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds) { + public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds, boolean allowPartialResults) { SearchHit[] searchHits = new SearchHit[docIds.length]; DocIdToIndex[] docs = new DocIdToIndex[docIds.length]; for (int index = 0; index < docIds.length; index++) { @@ -58,30 +67,55 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde LeafReaderContext ctx = indexReader.leaves().get(leafOrd); int endReaderIdx = endReaderIdx(ctx, 0, docs); int[] docsInLeaf = docIdsInLeaf(0, endReaderIdx, docs, ctx.docBase); - setNextReader(ctx, docsInLeaf); - for (int i = 0; i < docs.length; i++) { - if (i >= endReaderIdx) { - leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves()); - ctx = indexReader.leaves().get(leafOrd); - endReaderIdx = endReaderIdx(ctx, i, docs); - docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase); - setNextReader(ctx, docsInLeaf); + try { + setNextReader(ctx, docsInLeaf); + } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { + if (allowPartialResults) { + timedOut = true; + return SearchHits.EMPTY; } - currentDoc = docs[i].docId; - assert searchHits[docs[i].index] == null; - searchHits[docs[i].index] = nextDoc(docs[i].docId); + throw new SearchTimeoutException(shardTarget, "Time exceeded"); } - } catch (Exception e) { - for (SearchHit searchHit : searchHits) { - if (searchHit != null) { - searchHit.decRef(); + for (int i = 0; i < docs.length; i++) { + try { + if (i >= endReaderIdx) { + leafOrd = ReaderUtil.subIndex(docs[i].docId, indexReader.leaves()); + ctx = indexReader.leaves().get(leafOrd); + endReaderIdx = endReaderIdx(ctx, i, docs); + docsInLeaf = docIdsInLeaf(i, endReaderIdx, docs, ctx.docBase); + setNextReader(ctx, docsInLeaf); + } + currentDoc = docs[i].docId; + assert searchHits[docs[i].index] == null; + searchHits[docs[i].index] = nextDoc(docs[i].docId); + } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { + if (allowPartialResults) { + timedOut = true; + SearchHit[] partialSearchHits = new SearchHit[i]; + System.arraycopy(searchHits, 0, partialSearchHits, 0, i); + return partialSearchHits; + } + purgeSearchHits(searchHits); + throw new SearchTimeoutException(shardTarget, "Time exceeded"); } } + } catch (SearchTimeoutException e) { + throw e; + } catch (Exception e) { + purgeSearchHits(searchHits); throw new FetchPhaseExecutionException(shardTarget, "Error running fetch phase for doc [" + currentDoc + "]", e); } return searchHits; } + private static void purgeSearchHits(SearchHit[] searchHits) { + for (SearchHit searchHit : searchHits) { + if (searchHit != null) { + searchHit.decRef(); + } + } + } + private static int endReaderIdx(LeafReaderContext currentReaderContext, 
int index, DocIdToIndex[] docs) { int firstInNextReader = currentReaderContext.docBase + currentReaderContext.reader().maxDoc(); int i = index + 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 762a7e0f47cab..dda20dfb37e9d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -8,35 +8,65 @@ */ package org.elasticsearch.action.search; +import org.apache.lucene.document.Document; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; +import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.mapper.IdLoader; +import org.elasticsearch.index.mapper.MapperMetrics; +import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.test.TestSearchContext; import 
org.elasticsearch.transport.Transport; +import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -749,4 +779,159 @@ private static void addProfiling(boolean profiled, QuerySearchResult queryResult private static ProfileResult fetchProfile(boolean profiled) { return profiled ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), FETCH_PROFILE_TIME, List.of()) : null; } + + public void testFetchTimeoutWithPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, true)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null); + assertTrue(searchContext.queryResult().searchTimedOut()); + assertEquals(1, searchContext.fetchResult().hits().getHits().length); + } finally { + r.close(); + dir.close(); + } + } + + public void testFetchTimeoutNoPartialResults() throws IOException { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + w.addDocument(new Document()); + w.addDocument(new Document()); + w.addDocument(new Document()); + IndexReader r = w.getReader(); + w.close(); + ContextIndexSearcher contextIndexSearcher = createSearcher(r); + + try (SearchContext searchContext = createSearchContext(contextIndexSearcher, false)) { + FetchPhase fetchPhase = createFetchPhase(contextIndexSearcher); + expectThrows(SearchTimeoutException.class, () -> fetchPhase.execute(searchContext, new int[] { 0, 1, 2 }, null)); + assertNull(searchContext.fetchResult().hits()); + } finally { + r.close(); + dir.close(); + } + } + + private static ContextIndexSearcher createSearcher(IndexReader reader) throws IOException { + return new ContextIndexSearcher(reader, null, null, new QueryCachingPolicy() { + @Override + public void onUse(Query query) {} + + @Override + public boolean shouldCache(Query query) { + return false; + } + }, randomBoolean()); + } + + private static FetchPhase createFetchPhase(ContextIndexSearcher contextIndexSearcher) { + return new FetchPhase(Collections.singletonList(fetchContext -> new FetchSubPhaseProcessor() { + boolean processCalledOnce = false; + + @Override + public void setNextReader(LeafReaderContext readerContext) {} + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + // we throw only once one doc has been fetched, so we can test partial results are returned + if (processCalledOnce) { + contextIndexSearcher.throwTimeExceededException(); + } else { + processCalledOnce = true; + } + } + + @Override + public StoredFieldsSpec storedFieldsSpec() { + return StoredFieldsSpec.NO_REQUIREMENTS; + } + })); + } + + private static SearchContext createSearchContext(ContextIndexSearcher contextIndexSearcher, boolean allowPartialResults) { + IndexSettings indexSettings = new IndexSettings( + IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + Settings.EMPTY + ); + BitsetFilterCache bitsetFilterCache = new 
BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() { + @Override + public void onCache(ShardId shardId, Accountable accountable) { + + } + + @Override + public void onRemoval(ShardId shardId, Accountable accountable) { + + } + }); + + SearchExecutionContext searchExecutionContext = new SearchExecutionContext( + 0, + 0, + indexSettings, + bitsetFilterCache, + null, + null, + MappingLookup.EMPTY, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + Collections.emptyMap(), + null, + MapperMetrics.NOOP + ); + TestSearchContext searchContext = new TestSearchContext(searchExecutionContext, null, contextIndexSearcher) { + private final FetchSearchResult fetchSearchResult = new FetchSearchResult(); + private final ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + new SearchRequest().allowPartialSearchResults(allowPartialResults), + new ShardId("index", "indexUUID", 0), + 0, + 1, + AliasFilter.EMPTY, + 1f, + 0L, + null + ); + + @Override + public IdLoader newIdLoader() { + return new IdLoader.StoredIdLoader(); + } + + @Override + public FetchSearchResult fetchResult() { + return fetchSearchResult; + } + + @Override + public ShardSearchRequest request() { + return request; + } + }; + searchContext.addReleasable(searchContext.fetchResult()::decRef); + searchContext.setTask(new SearchShardTask(-1, "type", "action", "description", null, Collections.emptyMap())); + return searchContext; + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index c699e117ffbf4..d5e930321db95 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -77,7 +77,7 @@ protected SearchHit nextDoc(int doc) { } }; - SearchHit[] hits = it.iterate(null, reader, docs); + SearchHit[] hits = it.iterate(null, reader, docs, randomBoolean()); assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { @@ -125,7 +125,7 @@ protected SearchHit nextDoc(int doc) { } }; - Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs)); + Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs, randomBoolean())); assertThat(e.getMessage(), containsString("Error running fetch phase for doc [" + badDoc + "]")); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index b50fd4e96044c..51f66418bb44b 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -53,6 +53,8 @@ import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.TriConsumer; @@ -143,8 +145,10 @@ import org.elasticsearch.search.fetch.FetchPhase; import 
org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase; import org.elasticsearch.search.fetch.subphase.FetchSourcePhase; +import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; @@ -466,7 +470,18 @@ private SubSearchContext buildSubSearchContext( .when(subContext) .getNestedDocuments(); when(ctx.getSearchExecutionContext()).thenReturn(subContext); - + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + new SearchRequest().allowPartialSearchResults(randomBoolean()), + new ShardId("index", "indexUUID", 0), + 0, + 1, + AliasFilter.EMPTY, + 1f, + 0L, + null + ); + when(ctx.request()).thenReturn(request); IndexShard indexShard = mock(IndexShard.class); when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0)); when(indexShard.indexSettings()).thenReturn(indexSettings); From 9790cc4679a7309255ab2197efbd71a82127a8a0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 18 Nov 2024 13:41:33 +0000 Subject: [PATCH 006/386] [ML] Enable built-in Inference Endpoints and default for Semantic Text (#116931) Adds built-in inference endpoints for the ELSER (.elser-2-elasticsearch) and multilingual-e5-small models (.multilingual-e5-small-elasticsearch). The semantic text inference Id field now defaults to elser-2-elasticsearch --- docs/changelog/116931.yaml | 5 +++ .../test/cluster/FeatureFlag.java | 1 - .../xpack/inference/DefaultEndPointsIT.java | 2 - .../xpack/inference/InferenceCrudIT.java | 6 +-- .../inference/DefaultElserFeatureFlag.java | 21 --------- .../xpack/inference/InferenceFeatures.java | 19 ++++---- .../xpack/inference/InferencePlugin.java | 6 +-- .../mapper/SemanticTextFieldMapper.java | 13 ++---- .../rest/RestGetInferenceModelAction.java | 10 +---- .../BaseElasticsearchInternalService.java | 6 --- .../mapper/SemanticTextFieldMapperTests.java | 44 ++++++------------- .../inference/30_semantic_text_inference.yml | 2 +- .../test/inference/40_semantic_text_query.yml | 2 +- 13 files changed, 39 insertions(+), 98 deletions(-) create mode 100644 docs/changelog/116931.yaml delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java diff --git a/docs/changelog/116931.yaml b/docs/changelog/116931.yaml new file mode 100644 index 0000000000000..8b31d236ff137 --- /dev/null +++ b/docs/changelog/116931.yaml @@ -0,0 +1,5 @@ +pr: 116931 +summary: Enable built-in Inference Endpoints and default for Semantic Text +area: "Machine Learning" +type: enhancement +issues: [] diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index f996db92e57f4..22449ca763d09 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,7 +19,6 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", 
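        // assumed parameter order: (system property, first version the flag applies from, version it was removed in; null = still present)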
Version.fromString("8.16.0"), null), - INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null), ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 1fef26989d845..3db834bb579ff 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -47,7 +47,6 @@ public void tearDown() throws Exception { @SuppressWarnings("unchecked") public void testInferDeploysDefaultElser() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); assertDefaultElserConfig(model); @@ -78,7 +77,6 @@ private static void assertDefaultElserConfig(Map modelConfig) { @SuppressWarnings("unchecked") public void testInferDeploysDefaultE5() throws IOException { - assumeTrue("Default config requires a feature flag", DefaultElserFeatureFlag.isEnabled()); var model = getModel(ElasticsearchInternalService.DEFAULT_E5_ID); assertDefaultE5Config(model); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 081c83b1e7067..591db6db8495a 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -44,18 +44,18 @@ public void testCRUD() throws IOException { } var getAllModels = getAllModels(); - int numModels = DefaultElserFeatureFlag.isEnabled() ? 11 : 9; + int numModels = 11; assertThat(getAllModels, hasSize(numModels)); var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); - int numSparseModels = DefaultElserFeatureFlag.isEnabled() ? 6 : 5; + int numSparseModels = 6; assertThat(getSparseModels, hasSize(numSparseModels)); for (var sparseModel : getSparseModels) { assertEquals("sparse_embedding", sparseModel.get("task_type")); } var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); - int numDenseModels = DefaultElserFeatureFlag.isEnabled() ? 5 : 4; + int numDenseModels = 5; assertThat(getDenseModels, hasSize(numDenseModels)); for (var denseModel : getDenseModels) { assertEquals("text_embedding", denseModel.get("task_type")); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java deleted file mode 100644 index 2a764dabd62ae..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/DefaultElserFeatureFlag.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -public class DefaultElserFeatureFlag { - - private DefaultElserFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_default_elser"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 10ffedef14e26..ad89cba945143 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; -import java.util.HashSet; import java.util.Set; /** @@ -24,16 +23,14 @@ public class InferenceFeatures implements FeatureSpecification { @Override public Set getFeatures() { - var features = new HashSet(); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED); - features.add(RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED); - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID); - features.add(SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS); - features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED); - if (DefaultElserFeatureFlag.isEnabled()) { - features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2); - } - return Set.copyOf(features); + return Set.of( + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, + RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, + SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, + SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, + SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2, + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED + ); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index f068caff805af..62405a2e9f7de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -227,10 +227,8 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); - if (DefaultElserFeatureFlag.isEnabled()) { - for (var service : registry.getServices().values()) { - service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); - } + for (var service : registry.getServices().values()) { + service.defaultConfigIds().forEach(modelRegistry::addDefaultIds); } inferenceServiceRegistry.set(registry); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index f0cb612c9082f..890856d0b6e80 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -57,7 +57,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import java.io.IOException; import java.util.ArrayList; @@ -111,16 +110,12 @@ public static class Builder extends FieldMapper.Builder { INFERENCE_ID_FIELD, false, mapper -> ((SemanticTextFieldType) mapper.fieldType()).inferenceId, - DefaultElserFeatureFlag.isEnabled() ? DEFAULT_ELSER_2_INFERENCE_ID : null + DEFAULT_ELSER_2_INFERENCE_ID ).addValidator(v -> { if (Strings.isEmpty(v)) { - // If the default ELSER feature flag is enabled, the only way we get here is if the user explicitly sets the param to an - // empty value. However, if the feature flag is disabled, we can get here if the user didn't set the param. - // Adjust the error message appropriately. - String message = DefaultElserFeatureFlag.isEnabled() - ? "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" - : "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must be specified"; - throw new IllegalArgumentException(message); + throw new IllegalArgumentException( + "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" + ); } }); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 967ad4b46dcb3..83b2a8a0f5182 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -15,10 +15,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -69,11 +66,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient @Override public Set supportedCapabilities() { - Set capabilities = new HashSet<>(); - if (DefaultElserFeatureFlag.isEnabled()) { - capabilities.add(DEFAULT_ELSER_2_CAPABILITY); - } - - return Collections.unmodifiableSet(capabilities); + return Set.of(DEFAULT_ELSER_2_CAPABILITY); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 922b366498c27..fc070965f29c2 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import java.io.IOException; @@ -296,11 +295,6 @@ protected void maybeStartDeployment( InferModelAction.Request request, ActionListener listener ) { - if (DefaultElserFeatureFlag.isEnabled() == false) { - listener.onFailure(e); - return; - } - if (isDefaultId(model.getInferenceEntityId()) && ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { this.start(model, request.getInferenceTimeout(), listener.delegateFailureAndWrap((l, started) -> { client.execute(InferModelAction.INSTANCE, request, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index f444719c730f5..6e58226f85f28 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.model.TestModel; import org.junit.AssumptionViolatedException; @@ -103,9 +102,6 @@ protected Collection getPlugins() { @Override protected void minimalMapping(XContentBuilder b) throws IOException { b.field("type", "semantic_text"); - if (DefaultElserFeatureFlag.isEnabled() == false) { - b.field("inference_id", "test_model"); - } } @Override @@ -175,9 +171,7 @@ public void testDefaults() throws Exception { DocumentMapper mapper = mapperService.documentMapper(); assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); - if (DefaultElserFeatureFlag.isEnabled()) { - assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); - } + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); ParsedDocument doc1 = mapper.parse(source(this::writeField)); List fields = doc1.rootDoc().getFields("field"); @@ -211,15 +205,13 @@ public void testSetInferenceEndpoints() throws IOException { assertSerialization.accept(fieldMapping, mapperService); } { - if (DefaultElserFeatureFlag.isEnabled()) { - final XContentBuilder fieldMapping = fieldMapping( - b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) - ); - final MapperService mapperService = createMapperService(fieldMapping); - assertSemanticTextField(mapperService, fieldName, false); - assertInferenceEndpoints(mapperService, fieldName, 
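            // only search_inference_id was set above, so inference_id falls back to the built-in ELSER endpoint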
DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); - assertSerialization.accept(fieldMapping, mapperService); - } + final XContentBuilder fieldMapping = fieldMapping( + b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + final MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); + assertSerialization.accept(fieldMapping, mapperService); } { final XContentBuilder fieldMapping = fieldMapping( @@ -246,26 +238,18 @@ public void testInvalidInferenceEndpoints() { ); } { - final String expectedMessage = DefaultElserFeatureFlag.isEnabled() - ? "[inference_id] on mapper [field] of type [semantic_text] must not be empty" - : "[inference_id] on mapper [field] of type [semantic_text] must be specified"; Exception e = expectThrows( MapperParsingException.class, () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, ""))) ); - assertThat(e.getMessage(), containsString(expectedMessage)); + assertThat(e.getMessage(), containsString("[inference_id] on mapper [field] of type [semantic_text] must not be empty")); } { - if (DefaultElserFeatureFlag.isEnabled()) { - Exception e = expectThrows( - MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) - ); - assertThat( - e.getMessage(), - containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty") - ); - } + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) + ); + assertThat(e.getMessage(), containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty")); } } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 445df1dc302b9..71c9e5a23aea1 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -551,7 +551,7 @@ setup: --- "Calculates embeddings using the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: [capabilities] capabilities: - method: GET diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 17938f3b61a41..d28fce3be0d87 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -843,7 +843,7 @@ setup: --- "Query a field that uses the default ELSER 2 endpoint": - requires: - reason: "default ELSER 2 inference ID is behind a feature flag" + reason: "default ELSER 2 inference ID is enabled via a capability" test_runner_features: 
[capabilities] capabilities: - method: GET From 81e3afaafacc9cc6bc9de3c2d866e0c3fda95c31 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 18 Nov 2024 14:55:52 +0100 Subject: [PATCH 007/386] [Build] Fix fips testing after buildparams rework (#116934) --- build-tools-internal/src/main/groovy/elasticsearch.fips.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 567812c740817..493f7a505bb5b 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -20,7 +20,7 @@ import org.elasticsearch.gradle.testclusters.TestDistribution // Common config when running with a FIPS-140 runtime JVM if (buildParams.inFipsJvm) { allprojects { - String javaSecurityFilename = buildParams.runtimeJavaDetails.toLowerCase().contains('oracle') ? 'fips_java_oracle.security' : 'fips_java.security' + String javaSecurityFilename = buildParams.runtimeJavaDetails.get().toLowerCase().contains('oracle') ? 'fips_java_oracle.security' : 'fips_java.security' File fipsResourcesDir = new File(project.buildDir, 'fips-resources') File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') From 08daf65592834a5865a09181c389b589a81caf7e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 18 Nov 2024 14:30:11 +0000 Subject: [PATCH 008/386] Remove health historical features and upgrade test (#116928) --- .../upgrades/HealthNodeUpgradeIT.java | 45 ------------------- .../elasticsearch/health/HealthFeatures.java | 17 ------- .../metadata/HealthMetadataService.java | 3 +- .../node/DiskHealthIndicatorService.java | 10 ----- .../health/node/LocalHealthMonitor.java | 2 - .../ShardsCapacityHealthIndicatorService.java | 10 ----- .../selection/HealthNodeTaskExecutor.java | 8 +--- .../node/DiskHealthIndicatorServiceTests.java | 8 +--- ...dsCapacityHealthIndicatorServiceTests.java | 7 +-- .../core/HealthApiUsageTransportAction.java | 11 +---- .../xpack/core/XPackFeatures.java | 7 --- 11 files changed, 7 insertions(+), 121 deletions(-) delete mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java deleted file mode 100644 index 2ed1b7fe9e79b..0000000000000 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.upgrades; - -import com.carrotsearch.randomizedtesting.annotations.Name; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.hamcrest.Matchers; - -import java.nio.charset.StandardCharsets; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { - - public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { - super(upgradedNodes); - } - - public void testHealthNode() throws Exception { - if (clusterHasFeature("health.supports_health")) { - assertBusy(() -> { - Response response = client().performRequest(new Request("GET", "_cat/tasks")); - String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); - assertThat(tasks, Matchers.containsString("health-node")); - }); - assertBusy(() -> { - String path = clusterHasFeature("health.supports_health_report_api") ? "_health_report" : "_internal/_health"; - Response response = client().performRequest(new Request("GET", path)); - Map health_report = entityAsMap(response.getEntity()); - assertThat(health_report.get("status"), equalTo("green")); - }); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java index 6d106199610d6..091dbc0eae742 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthFeatures.java +++ b/server/src/main/java/org/elasticsearch/health/HealthFeatures.java @@ -9,34 +9,17 @@ package org.elasticsearch.health; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; public class HealthFeatures implements FeatureSpecification { - public static final NodeFeature SUPPORTS_HEALTH = new NodeFeature("health.supports_health"); - public static final NodeFeature SUPPORTS_HEALTH_REPORT_API = new NodeFeature("health.supports_health_report_api"); - public static final NodeFeature SUPPORTS_SHARDS_CAPACITY_INDICATOR = new NodeFeature("health.shards_capacity_indicator"); public static final NodeFeature SUPPORTS_EXTENDED_REPOSITORY_INDICATOR = new NodeFeature("health.extended_repository_indicator"); @Override public Set getFeatures() { return Set.of(SUPPORTS_EXTENDED_REPOSITORY_INDICATOR); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - SUPPORTS_HEALTH, - Version.V_8_5_0, // health accessible via /_internal/_health - SUPPORTS_HEALTH_REPORT_API, - Version.V_8_7_0, // health accessible via /_health_report - SUPPORTS_SHARDS_CAPACITY_INDICATOR, - Version.V_8_8_0 - ); - } } diff --git a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java index 44fc65fab534f..0d30e157a3a09 100644 --- a/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java +++ b/server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java @@ -28,7 +28,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.features.FeatureService; import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.health.HealthFeatures; import java.util.List; import java.util.stream.Stream; @@ -137,7 +136,7 @@ private void updateOnHealthNodeEnabledChange(boolean enabled) { private boolean 
canPostClusterStateUpdates(ClusterState state) { // Wait until every node in the cluster supports health checks - return isMaster && state.clusterRecovered() && featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_HEALTH); + return isMaster && state.clusterRecovered(); } private void updateOnClusterStateChange(ClusterChangedEvent event) { diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index e38ce7ac92a05..c975e1d1abd91 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -91,15 +90,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources ClusterState clusterState = clusterService.state(); Map diskHealthInfoMap = healthInfo.diskInfoByNode(); if (diskHealthInfoMap == null || diskHealthInfoMap.isEmpty()) { - if (featureService.clusterHasFeature(clusterState, HealthFeatures.SUPPORTS_HEALTH) == false) { - return createIndicator( - HealthStatus.GREEN, - "No disk usage data available. The cluster currently has mixed versions (an upgrade may be in progress).", - HealthIndicatorDetails.EMPTY, - List.of(), - List.of() - ); - } /* * If there is no disk health info, that either means that a new health node was just elected, or something is seriously * wrong with health data collection on the health node. Either way, we immediately return UNKNOWN. 
If there are at least diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index a08de9abb4aed..aab9e972cba73 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.health.node.selection.HealthNode; @@ -200,7 +199,6 @@ public void clusterChanged(ClusterChangedEvent event) { } } prerequisitesFulfilled = event.state().clusterRecovered() - && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH) && HealthMetadata.getFromClusterState(event.state()) != null && currentHealthNode != null && currentMasterNode != null; diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index b02bbd95bb9ae..4dd94cfc046c9 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; @@ -111,15 +110,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources var state = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(state); if (healthMetadata == null || healthMetadata.getShardLimitsMetadata() == null) { - if (featureService.clusterHasFeature(state, HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR) == false) { - return createIndicator( - HealthStatus.GREEN, - "No shard limits configured yet. 
The cluster currently has mixed versions (an upgrade may be in progress).",
-                HealthIndicatorDetails.EMPTY,
-                List.of(),
-                List.of()
-            );
-        }
             return unknownIndicator();
         }

diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
index 3357936e5f10c..3efad1aee26b0 100644
--- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
+++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.features.FeatureService;
-import org.elasticsearch.health.HealthFeatures;
 import org.elasticsearch.node.NodeClosedException;
 import org.elasticsearch.persistent.AllocatedPersistentTask;
 import org.elasticsearch.persistent.PersistentTaskParams;
@@ -157,11 +156,8 @@ public PersistentTasksCustomMetadata.Assignment getAssignment(
     // visible for testing
     void startTask(ClusterChangedEvent event) {
-        // Wait until every node in the cluster supports health checks
-        if (event.localNodeMaster()
-            && event.state().clusterRecovered()
-            && HealthNode.findTask(event.state()) == null
-            && featureService.clusterHasFeature(event.state(), HealthFeatures.SUPPORTS_HEALTH)) {
+        // Wait until master is stable before starting health task
+        if (event.localNodeMaster() && event.state().clusterRecovered() && HealthNode.findTask(event.state()) == null) {
             persistentTasksService.sendStartRequest(
                 TASK_NAME,
                 TASK_NAME,
diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java
index 6713042002fa3..07aa9af3b4030 100644
--- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.health.Diagnosis;
-import org.elasticsearch.health.HealthFeatures;
 import org.elasticsearch.health.HealthIndicatorDetails;
 import org.elasticsearch.health.HealthIndicatorImpact;
 import org.elasticsearch.health.HealthIndicatorResult;
@@ -1085,12 +1084,8 @@ static ClusterState createClusterState(
         Collection<DiscoveryNode> nodes,
         Map<String, Set<String>> indexNameToNodeIdsMap
     ) {
-        Map<String, Set<String>> features = new HashMap<>();
         DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
-        for (DiscoveryNode node : nodes) {
-            nodesBuilder = nodesBuilder.add(node);
-            features.put(node.getId(), Set.of(HealthFeatures.SUPPORTS_HEALTH.id()));
-        }
+        nodes.forEach(nodesBuilder::add);
         nodesBuilder.localNodeId(randomFrom(nodes).getId());
         nodesBuilder.masterNodeId(randomFrom(nodes).getId());
         ClusterBlocks.Builder clusterBlocksBuilder = new ClusterBlocks.Builder();
@@ -1125,7 +1120,6 @@ static ClusterState createClusterState(
         state.metadata(metadata.generateClusterUuidIfNeeded().build());
         state.routingTable(routingTable.build());
         state.blocks(clusterBlocksBuilder);
-        state.nodeFeatures(features);
         return state.build();
     }

diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java
index 7a578650b7cbd..15ef2e150761f
100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.health.HealthFeatures; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; @@ -451,11 +450,7 @@ private ClusterState createClusterState( metadata.put(idxMetadata); } - var features = Set.of(HealthFeatures.SUPPORTS_SHARDS_CAPACITY_INDICATOR.id()); - return ClusterState.builder(clusterState) - .metadata(metadata) - .nodeFeatures(Map.of(dataNode.getId(), features, frozenNode.getId(), features)) - .build(); + return ClusterState.builder(clusterState).metadata(metadata).build(); } private static IndexMetadata.Builder createIndexInDataNode(int shards) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java index 06393dfa3bade..155ea0ffdcbc3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiUsageTransportAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.stats.HealthApiStatsAction; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -30,10 +28,7 @@ */ public class HealthApiUsageTransportAction extends XPackUsageFeatureTransportAction { - static final NodeFeature SUPPORTS_HEALTH_STATS = new NodeFeature("health.supports_health_stats"); - private final Client client; - private final FeatureService featureService; @Inject public HealthApiUsageTransportAction( @@ -42,8 +37,7 @@ public HealthApiUsageTransportAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, - FeatureService featureService + Client client ) { super( XPackUsageFeatureAction.HEALTH.name(), @@ -54,7 +48,6 @@ public HealthApiUsageTransportAction( indexNameExpressionResolver ); this.client = client; - this.featureService = featureService; } @Override @@ -70,7 +63,7 @@ protected void masterOperation( client.threadPool().getThreadContext() ); - if (state.clusterRecovered() && featureService.clusterHasFeature(state, SUPPORTS_HEALTH_STATS)) { + if (state.clusterRecovered()) { HealthApiStatsAction.Request statsRequest = new HealthApiStatsAction.Request(); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); client.execute(HealthApiStatsAction.INSTANCE, statsRequest, preservingListener.delegateFailureAndWrap((l, r) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java index b885a90c30e57..f966bf97f4764 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackFeatures.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.core; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.license.License; import org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction; -import java.util.Map; import java.util.Set; /** @@ -32,9 +30,4 @@ public Set getFeatures() { LOGSDB_TELMETRY_STATS ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(HealthApiUsageTransportAction.SUPPORTS_HEALTH_STATS, Version.V_8_7_0); - } } From c8325727099cd683504650007ee950d0f171cbfe Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 18 Nov 2024 14:33:05 +0000 Subject: [PATCH 009/386] Remove some historical features (#116926) Historical features are now trivially true on v9 - so we can remove the features, and the check. Historical features do not affect cluster state, so this has no compatibility restrictions. --- .../datastreams/DataStreamFeatures.java | 8 -- server/src/main/java/module-info.java | 1 - .../elasticsearch/action/ActionModule.java | 2 +- .../indices/IndicesFeatures.java | 23 ---- .../elasticsearch/indices/IndicesService.java | 8 +- .../org/elasticsearch/rest/RestFeatures.java | 8 -- .../cluster/RestClusterGetSettingsAction.java | 41 +------ ...lasticsearch.features.FeatureSpecification | 1 - .../snapshots/SnapshotResiliencyTests.java | 3 +- .../apmdata/APMIndexTemplateRegistry.java | 5 +- .../xpack/apmdata/APMPlugin.java | 9 +- .../xpack/apmdata/APMDSLOnlyTests.java | 7 +- .../APMIndexTemplateRegistryTests.java | 28 +---- .../core/template/YamlTemplateRegistry.java | 21 +--- .../xpack/application/EnterpriseSearch.java | 2 - .../application/EnterpriseSearchFeatures.java | 14 --- .../analytics/AnalyticsTemplateRegistry.java | 14 --- .../connector/ConnectorTemplateRegistry.java | 14 --- .../AnalyticsTemplateRegistryTests.java | 30 +---- .../ConnectorTemplateRegistryTests.java | 25 +--- .../plugin/ilm/src/main/java/module-info.java | 2 - .../xpack/ilm/IndexLifecycle.java | 1 - .../xpack/ilm/IndexLifecycleFeatures.java | 22 ---- .../history/ILMHistoryTemplateRegistry.java | 12 -- ...lasticsearch.features.FeatureSpecification | 8 -- .../ilm/history/ILMHistoryStoreTests.java | 3 - .../oteldata/OTelIndexTemplateRegistry.java | 6 +- .../xpack/oteldata/OTelPlugin.java | 9 +- .../xpack/security/Security.java | 2 - .../xpack/security/SecurityFeatures.java | 9 -- .../security/profile/ProfileService.java | 41 ++----- .../support/SecuritySystemIndices.java | 3 - .../security/profile/ProfileServiceTests.java | 109 +++--------------- .../xpack/slm/SnapshotLifecycle.java | 1 - .../xpack/slm/SnapshotLifecycleFeatures.java | 8 -- .../SnapshotLifecycleTemplateRegistry.java | 12 -- ...napshotLifecycleTemplateRegistryTests.java | 12 +- .../stack/LegacyStackTemplateRegistry.java | 16 +-- .../xpack/stack/StackPlugin.java | 6 +- .../xpack/stack/StackTemplateRegistry.java | 22 +--- .../xpack/stack/StackTemplatesFeatures.java | 21 ---- ...lasticsearch.features.FeatureSpecification | 8 -- .../LegacyStackTemplateRegistryTests.java | 13 +-- ...StackRegistryWithNonRequiredTemplates.java | 6 +- .../stack/StackTemplateRegistryTests.java | 48 +------- 45 files changed, 51 insertions(+), 613 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java delete 
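For context, a minimal sketch of the pattern this patch deletes (the class and
feature names below are hypothetical, not taken from any file in this series):
a FeatureSpecification could declare regular node features alongside historical
ones keyed by the version that introduced them.

    import org.elasticsearch.Version;
    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    import java.util.Map;
    import java.util.Set;

    public class ExampleFeatures implements FeatureSpecification {
        public static final NodeFeature NEW_FEATURE = new NodeFeature("example.new_feature");
        public static final NodeFeature OLD_FEATURE = new NodeFeature("example.old_feature");

        @Override
        public Set<NodeFeature> getFeatures() {
            // Regular node features remain meaningful: nodes publish them and callers
            // test them with featureService.clusterHasFeature(state, NEW_FEATURE).
            return Set.of(NEW_FEATURE);
        }

        @Override
        public Map<NodeFeature, Version> getHistoricalFeatures() {
            // A historical feature holds once every node is at or above the mapped
            // version. Every v9 node satisfies any 8.x bound, so such checks are
            // trivially true and the declaration, the override, and the call sites
            // can all be deleted, as this patch does.
            return Map.of(OLD_FEATURE, Version.V_8_7_0);
        }
    }

Since the historical map never changes the answer on v9, dropping it removes
code without changing behavior, which is why no compatibility shim is needed.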
mode 100644 x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java delete mode 100644 x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification delete mode 100644 x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java delete mode 100644 x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index f60a3e5c47a7f..f090186480b76 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -9,7 +9,6 @@ package org.elasticsearch.datastreams; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; @@ -17,7 +16,6 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; /** @@ -25,14 +23,8 @@ */ public class DataStreamFeatures implements FeatureSpecification { - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); public static final NodeFeature DATA_STREAM_FAILURE_STORE_TSDB_FIX = new NodeFeature("data_stream.failure_store.tsdb_fix"); - @Override - public Map getHistoricalFeatures() { - return Map.of(DATA_STREAM_LIFECYCLE, Version.V_8_11_0); - } - @Override public Set getFeatures() { return Set.of( diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 29c869a9f8d77..08794f5938bc2 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -426,7 +426,6 @@ org.elasticsearch.cluster.service.TransportFeatures, org.elasticsearch.cluster.metadata.MetadataFeatures, org.elasticsearch.rest.RestFeatures, - org.elasticsearch.indices.IndicesFeatures, org.elasticsearch.repositories.RepositoriesFeatures, org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures, diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 9f727f49530a1..98d6284fd91d2 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -853,7 +853,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClusterStateAction(settingsFilter, threadPool)); registerHandler.accept(new RestClusterHealthAction()); registerHandler.accept(new RestClusterUpdateSettingsAction()); - registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter, clusterSupportsFeature)); + registerHandler.accept(new RestClusterGetSettingsAction(settings, clusterSettings, settingsFilter)); registerHandler.accept(new RestClusterRerouteAction(settingsFilter)); registerHandler.accept(new RestClusterSearchShardsAction()); registerHandler.accept(new RestPendingClusterTasksAction()); diff --git 
a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java b/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java deleted file mode 100644 index bd39d125969ce..0000000000000 --- a/server/src/main/java/org/elasticsearch/indices/IndicesFeatures.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.indices; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class IndicesFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - return Map.of(IndicesService.SUPPORTS_AUTO_PUT, Version.V_8_8_0); - } -} diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 3ac61bbca1a21..27d832241bfed 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; -import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -79,7 +78,6 @@ import org.elasticsearch.env.ShardLock; import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.CloseUtils; @@ -211,8 +209,6 @@ public class IndicesService extends AbstractLifecycleComponent Setting.Property.NodeScope ); - static final NodeFeature SUPPORTS_AUTO_PUT = new NodeFeature("indices.auto_put_supported"); - /** * The node's settings. */ @@ -910,9 +906,7 @@ public void createShard( .setConcreteIndex(shardRouting.index()) .source(mapping.source().string(), XContentType.JSON); client.execute( - featureService.clusterHasFeature(clusterService.state(), SUPPORTS_AUTO_PUT) - ? 
TransportAutoPutMappingAction.TYPE - : TransportPutMappingAction.TYPE, + TransportAutoPutMappingAction.TYPE, putMappingRequestAcknowledgedRequest.ackTimeout(TimeValue.MAX_VALUE).masterNodeTimeout(TimeValue.MAX_VALUE), new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) ); diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 8d546f7aa43f8..e72b30526c8e3 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -9,13 +9,10 @@ package org.elasticsearch.rest; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; -import java.util.Map; import java.util.Set; import static org.elasticsearch.search.fetch.subphase.highlight.DefaultHighlighter.UNIFIED_HIGHLIGHTER_MATCHED_FIELDS; @@ -29,9 +26,4 @@ public Set getFeatures() { UNIFIED_HIGHLIGHTER_MATCHED_FIELDS ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index 946931e166363..ca9e4abcaeec7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -11,13 +11,11 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Set; -import java.util.function.Predicate; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @@ -35,23 +32,14 @@ @ServerlessScope(Scope.INTERNAL) public class RestClusterGetSettingsAction extends BaseRestHandler { - public static final NodeFeature SUPPORTS_GET_SETTINGS_ACTION = new NodeFeature("rest.get_settings_action"); - private final Settings settings; private final ClusterSettings clusterSettings; private final SettingsFilter settingsFilter; - private final Predicate clusterSupportsFeature; - public RestClusterGetSettingsAction( - Settings settings, - ClusterSettings clusterSettings, - SettingsFilter settingsFilter, - Predicate clusterSupportsFeature - ) { + public RestClusterGetSettingsAction(Settings settings, ClusterSettings clusterSettings, SettingsFilter settingsFilter) { this.settings = 
settings; this.clusterSettings = clusterSettings; this.settingsFilter = settingsFilter; - this.clusterSupportsFeature = clusterSupportsFeature; } @Override @@ -72,10 +60,6 @@ private static void setUpRequestParams(MasterNodeReadRequest clusterRequest, public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); - if (clusterSupportsFeature.test(SUPPORTS_GET_SETTINGS_ACTION) == false) { - return prepareLegacyRequest(request, client, renderDefaults); - } - ClusterGetSettingsAction.Request clusterSettingsRequest = new ClusterGetSettingsAction.Request(getMasterNodeTimeout(request)); setUpRequestParams(clusterSettingsRequest, request); @@ -89,29 +73,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC ); } - private RestChannelConsumer prepareLegacyRequest(final RestRequest request, final NodeClient client, final boolean renderDefaults) { - ClusterStateRequest clusterStateRequest = new ClusterStateRequest(getMasterNodeTimeout(request)).routingTable(false).nodes(false); - setUpRequestParams(clusterStateRequest, request); - return channel -> client.admin() - .cluster() - .state( - clusterStateRequest, - new RestToXContentListener(channel).map( - r -> response( - new ClusterGetSettingsAction.Response( - r.getState().metadata().persistentSettings(), - r.getState().metadata().transientSettings(), - r.getState().metadata().settings() - ), - renderDefaults, - settingsFilter, - clusterSettings, - settings - ) - ) - ); - } - @Override protected Set responseParams() { return Settings.FORMAT_PARAMS; diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 37b9b5836ca5f..089c0231bc593 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -14,7 +14,6 @@ org.elasticsearch.health.HealthFeatures org.elasticsearch.cluster.service.TransportFeatures org.elasticsearch.cluster.metadata.MetadataFeatures org.elasticsearch.rest.RestFeatures -org.elasticsearch.indices.IndicesFeatures org.elasticsearch.repositories.RepositoriesFeatures org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 077877f713571..cf240550e809d 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -146,7 +146,6 @@ import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.indices.EmptySystemIndices; -import org.elasticsearch.indices.IndicesFeatures; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesServiceBuilder; @@ -2245,7 +2244,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { .scriptService(scriptService) .clusterService(clusterService) .client(client) - .featureService(new FeatureService(List.of(new IndicesFeatures()))) 
+ .featureService(new FeatureService(List.of())) .metaStateService(new MetaStateService(nodeEnv, namedXContentRegistry)) .mapperMetrics(MapperMetrics.NOOP) .build(); diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 466c9e4f006dc..228ac401b96bb 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -33,8 +32,7 @@ public APMIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super( nodeSettings, @@ -42,7 +40,6 @@ public APMIndexTemplateRegistry( threadPool, client, xContentRegistry, - featureService, templateFilter(isDataStreamsLifecycleOnlyMode(clusterService.getSettings())) ); } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index aefb45f6186c1..0be95c337838a 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); registry.set( - new APMIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { APMIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java index 476b504339e62..b18e95b55dde0 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMDSLOnlyTests.java @@ -14,8 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -24,7 +22,6 @@ import org.junit.After; import org.junit.Before; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -56,15 +53,13 @@ public void createRegistryAndClient() { 
additionalSettings, clusterSettings ); - FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures())); apmIndexTemplateRegistry = new APMIndexTemplateRegistry( Settings.EMPTY, clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); apmIndexTemplateRegistry.setEnabled(true); } diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 4a2b9265b3b05..32e7c2225e19d 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.apmdata; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -30,8 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -92,9 +89,8 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); - FeatureService featureService = new FeatureService(List.of(new DataStreamFeatures())); stackTemplateRegistryAccessor = new StackTemplateRegistryAccessor( - new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY, featureService) + new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY) ); apmIndexTemplateRegistry = new APMIndexTemplateRegistry( @@ -102,8 +98,7 @@ public void createRegistryAndClient() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); apmIndexTemplateRegistry.setEnabled(true); } @@ -408,25 +403,6 @@ public void testIndexTemplateConventions() throws Exception { } } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes); - apmIndexTemplateRegistry.clusterChanged(event); - } - public void testILMComponentTemplatesInstalled() throws Exception { int ilmFallbackCount = 0; for (Map.Entry entry : apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet()) { diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java index a30236b2fef28..cf0a73963f864 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/YamlTemplateRegistry.java @@ -11,15 +11,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -44,16 +41,12 @@ */ public abstract class YamlTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(YamlTemplateRegistry.class); - // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a - // dependency to the data-streams module just for this - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); private final int version; private final Map componentTemplates; private final Map composableIndexTemplates; private final List ingestPipelines; private final List lifecyclePolicies; - private final FeatureService featureService; private volatile boolean enabled; public YamlTemplateRegistry( @@ -61,10 +54,9 @@ public YamlTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - this(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService, ignored -> true); + this(nodeSettings, clusterService, threadPool, client, xContentRegistry, ignored -> true); } @SuppressWarnings({ "unchecked", "this-escape" }) @@ -74,7 +66,6 @@ public YamlTemplateRegistry( ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry, - FeatureService featureService, Predicate templateFilter ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); @@ -123,7 +114,6 @@ public YamlTemplateRegistry( .filter(templateFilter) .map(this::loadLifecyclePolicy) .collect(Collectors.toList()); - this.featureService = featureService; } catch (IOException e) { throw new ElasticsearchException(e); } @@ -152,13 +142,6 @@ public void close() { clusterService.removeListener(this); } - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to the use of the feature in all the `@lifecycle` component templates - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } - @Override protected boolean requiresMasterNode() { return true; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index a354ca4b4b31c..df1c76ccf770f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -456,7 +456,6 @@ public Collection createComponents(PluginServices services) { // Behavioral analytics components final AnalyticsTemplateRegistry analyticsTemplateRegistry = new AnalyticsTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() @@ -466,7 +465,6 @@ public Collection createComponents(PluginServices services) { // Connector components final ConnectorTemplateRegistry connectorTemplateRegistry = new ConnectorTemplateRegistry( services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java index 86882a28ec39f..ba121f2cf865e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchFeatures.java @@ -7,15 +7,11 @@ package org.elasticsearch.xpack.application; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.application.analytics.AnalyticsTemplateRegistry; -import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.rules.action.ListQueryRulesetsAction; import org.elasticsearch.xpack.application.rules.retriever.QueryRuleRetrieverBuilder; -import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.application.rules.action.TestQueryRulesetAction.QUERY_RULES_TEST_API; @@ -30,14 +26,4 @@ public Set getFeatures() { ListQueryRulesetsAction.QUERY_RULE_LIST_TYPES ); } - - @Override - public Map getHistoricalFeatures() { - return Map.of( - ConnectorTemplateRegistry.CONNECTOR_TEMPLATES_FEATURE, - Version.V_8_10_0, - AnalyticsTemplateRegistry.ANALYTICS_TEMPLATE_FEATURE, - Version.V_8_12_0 - ); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java index d9f433b8052bf..99a239dd617a2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.application.analytics; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; 
-import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -36,8 +33,6 @@ public class AnalyticsTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature ANALYTICS_TEMPLATE_FEATURE = new NodeFeature("behavioral_analytics.templates"); - // This number must be incremented when we make changes to built-in templates. static final int REGISTRY_VERSION = 3; @@ -100,17 +95,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public AnalyticsTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -138,9 +129,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), ANALYTICS_TEMPLATE_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index 41976bc6b4272..9b8cc7cfdbe4f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -33,8 +30,6 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { - public static final NodeFeature CONNECTOR_TEMPLATES_FEATURE = new NodeFeature("elastic-connectors.templates"); - // This number must be incremented when we make changes to built-in templates. 
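    // Installed templates record this version in cluster state; the registry
    // re-installs the built-ins when the recorded version is lower.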
static final int REGISTRY_VERSION = 3; @@ -153,17 +148,13 @@ protected List getIngestPipelines() { ) ); - private final FeatureService featureService; - public ConnectorTemplateRegistry( ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; } @Override @@ -186,9 +177,4 @@ protected boolean requiresMasterNode() { // Necessary to prevent conflicts in some mixed-cluster environments with pre-7.7 nodes return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), CONNECTOR_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java index 50102b8cfcf53..fb2fb11c7460f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.analytics; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -42,7 +40,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -78,13 +75,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - registry = new AnalyticsTemplateRegistry( - clusterService, - new FeatureService(List.of(new EnterpriseSearchFeatures())), - threadPool, - client, - NamedXContentRegistry.EMPTY - ); + registry = new AnalyticsTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -282,25 +273,6 @@ public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_7_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - 
.add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.8.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java index 3fbc5cd749cb2..a4c7015afafcb 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.application.connector; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -41,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.application.EnterpriseSearchFeatures; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; @@ -81,8 +78,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - FeatureService featureService = new FeatureService(List.of(new EnterpriseSearchFeatures())); - registry = new ConnectorTemplateRegistry(clusterService, featureService, threadPool, client, NamedXContentRegistry.EMPTY); + registry = new ConnectorTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -310,25 +306,6 @@ public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_9_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.10.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); - registry.clusterChanged(event); - } - // ------------- /** diff --git 
a/x-pack/plugin/ilm/src/main/java/module-info.java b/x-pack/plugin/ilm/src/main/java/module-info.java index 591c9786247e6..aa24c2d6f333c 100644 --- a/x-pack/plugin/ilm/src/main/java/module-info.java +++ b/x-pack/plugin/ilm/src/main/java/module-info.java @@ -18,6 +18,4 @@ provides org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider with org.elasticsearch.xpack.ilm.ReservedLifecycleStateHandlerProvider; - - provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index f41524480e2df..f830a2821d841 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -146,7 +146,6 @@ public Collection createComponents(PluginServices services) { ILMHistoryTemplateRegistry ilmTemplateRegistry = new ILMHistoryTemplateRegistry( settings, services.clusterService(), - services.featureService(), services.threadPool(), services.client(), services.xContentRegistry() diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java deleted file mode 100644 index cc78271e2d878..0000000000000 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleFeatures.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ilm; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry; - -import java.util.Map; - -public class IndexLifecycleFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - return Map.of(ILMHistoryTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0); - } -} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java index 28c28ef6e4c55..5633033e6faa1 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java @@ -8,12 +8,9 @@ package org.elasticsearch.xpack.ilm.history; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -40,13 +37,11 @@ public class ILMHistoryTemplateRegistry extends IndexTemplateRegistry { // version 6: manage by data stream lifecycle // version 7: version the index template name so we can upgrade existing deployments public static final int INDEX_TEMPLATE_VERSION = 7; - public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("ilm-history-managed-by-dsl"); public static final String ILM_TEMPLATE_VERSION_VARIABLE = "xpack.ilm_history.template.version"; public static final String ILM_TEMPLATE_NAME = "ilm-history-" + INDEX_TEMPLATE_VERSION; public static final String ILM_POLICY_NAME = "ilm-history-ilm-policy"; - private final FeatureService featureService; @Override protected boolean requiresMasterNode() { @@ -58,13 +53,11 @@ protected boolean requiresMasterNode() { public ILMHistoryTemplateRegistry( Settings nodeSettings, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; this.ilmHistoryEnabled = LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings); } @@ -104,9 +97,4 @@ protected List getLifecyclePolicies() { protected String getOrigin() { return ClientHelper.INDEX_LIFECYCLE_ORIGIN; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 1bf03ae25edd2..0000000000000 --- a/x-pack/plugin/ilm/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. -# - -org.elasticsearch.xpack.ilm.IndexLifecycleFeatures diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index cbdda089e8328..1797f6b10f3cb 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; @@ -40,7 +39,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.ilm.IndexLifecycleFeatures; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; @@ -80,7 +78,6 @@ public void setup() { ILMHistoryTemplateRegistry registry = new ILMHistoryTemplateRegistry( clusterService.getSettings(), clusterService, - new FeatureService(List.of(new IndexLifecycleFeatures())), threadPool, client, NamedXContentRegistry.EMPTY diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java index 435530542c857..ca52db9331cf3 100644 --- a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java +++ b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelIndexTemplateRegistry.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; @@ -27,10 +26,9 @@ public OTelIndexTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java index 543102330bd08..67bd8c4e002d3 100644 --- a/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java +++ b/x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/OTelPlugin.java @@ -48,14 +48,7 @@ public Collection createComponents(PluginServices services) { Settings settings = services.environment().settings(); ClusterService clusterService = services.clusterService(); 
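        // Construct the registry up front; it is only activated in the enabled branch below.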
registry.set( - new OTelIndexTemplateRegistry( - settings, - clusterService, - services.threadPool(), - services.client(), - services.xContentRegistry(), - services.featureService() - ) + new OTelIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); if (enabled) { OTelIndexTemplateRegistry registryInstance = registry.get(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 0b387a738a2c5..ef66392a87260 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1048,8 +1048,6 @@ Collection createComponents( getClock(), client, systemIndices.getProfileIndexManager(), - clusterService, - featureService, realms ); components.add(profileService); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java index d0292f32cd75f..53ecafa280715 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java @@ -7,18 +7,14 @@ package org.elasticsearch.xpack.security; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLE_MAPPING_CLEANUP; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.VERSION_SECURITY_PROFILE_ORIGIN; public class SecurityFeatures implements FeatureSpecification { @@ -26,9 +22,4 @@ public class SecurityFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(SECURITY_ROLE_MAPPING_CLEANUP, SECURITY_ROLES_METADATA_FLATTENED, SECURITY_MIGRATION_FRAMEWORK); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(SECURITY_PROFILE_ORIGIN_FEATURE, VERSION_SECURITY_PROFILE_ORIGIN); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index b347c278aae08..a3ee313c7f1d9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -35,7 +35,6 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -45,7 +44,6 @@ import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; @@ -100,14 +98,12 @@ import static org.elasticsearch.action.bulk.TransportSingleItemBulkWriteAction.toSingleItemBulkRequest; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.security.authc.Authentication.isFileOrNativeRealm; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE; public class ProfileService { private static final Logger logger = LogManager.getLogger(ProfileService.class); @@ -120,26 +116,14 @@ public class ProfileService { private final Clock clock; private final Client client; private final SecurityIndexManager profileIndex; - private final ClusterService clusterService; - private final FeatureService featureService; private final Function domainConfigLookup; private final Function realmRefLookup; - public ProfileService( - Settings settings, - Clock clock, - Client client, - SecurityIndexManager profileIndex, - ClusterService clusterService, - FeatureService featureService, - Realms realms - ) { + public ProfileService(Settings settings, Clock clock, Client client, SecurityIndexManager profileIndex, Realms realms) { this.settings = settings; this.clock = clock; this.client = client; this.profileIndex = profileIndex; - this.clusterService = clusterService; - this.featureService = featureService; this.domainConfigLookup = realms::getDomainConfig; this.realmRefLookup = realms::getRealmRef; } @@ -273,7 +257,7 @@ public void suggestProfile(SuggestProfilesRequest request, TaskId parentTaskId, listener::onFailure, () -> executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { @@ -403,7 +387,7 @@ public void usageStats(ActionListener> listener) { listener::onFailure, () -> executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportMultiSearchAction.TYPE, multiSearchRequest, ActionListener.wrap(multiSearchResponse -> { @@ -484,7 +468,7 @@ private void getVersionedDocument(String uid, ActionListener listener::onFailure, () -> executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportGetAction.TYPE, getRequest, ActionListener.wrap(response -> { @@ -514,7 +498,7 @@ private void getVersionedDocuments(Collection uids, ActionListener { frozenProfileIndex.checkIndexVersionThenExecute( listener::onFailure, - () -> new OriginSettingClient(client, getActionOrigin()).prepareMultiGet() + () -> new OriginSettingClient(client, 
SECURITY_PROFILE_ORIGIN).prepareMultiGet() .addIds(frozenProfileIndex.aliasName(), uids.stream().map(ProfileService::uidToDocId).toArray(String[]::new)) .execute(ActionListener.wrap(multiGetResponse -> { List retrievedDocs = new ArrayList<>(multiGetResponse.getResponses().length); @@ -589,7 +573,7 @@ private void searchVersionedDocumentsForSubjects( subjects.forEach(subject -> multiSearchRequest.add(buildSearchRequestForSubject(subject))); executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportMultiSearchAction.TYPE, multiSearchRequest, ActionListener.wrap( @@ -742,7 +726,7 @@ void createNewProfile(Subject subject, String uid, ActionListener liste listener::onFailure, () -> executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> { @@ -1007,7 +991,7 @@ void doUpdate(UpdateRequest updateRequest, ActionListener listen listener::onFailure, () -> executeAsyncWithOrigin( client, - getActionOrigin(), + SECURITY_PROFILE_ORIGIN, TransportUpdateAction.TYPE, updateRequest, ActionListener.wrap(updateResponse -> { @@ -1019,15 +1003,6 @@ void doUpdate(UpdateRequest updateRequest, ActionListener listen ); } - private String getActionOrigin() { - // profile origin and user is not available before v8.3.0 - if (featureService.clusterHasFeature(clusterService.state(), SECURITY_PROFILE_ORIGIN_FEATURE)) { - return SECURITY_PROFILE_ORIGIN; - } else { - return SECURITY_ORIGIN; - } - } - private static String uidToDocId(String uid) { return DOC_ID_PREFIX + uid; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 609e6696bcb0f..7b3f6a8d2ae55 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -57,8 +56,6 @@ public class SecuritySystemIndices { public static final String INTERNAL_SECURITY_PROFILE_INDEX_8 = ".security-profile-8"; public static final String SECURITY_PROFILE_ALIAS = ".security-profile"; - public static final Version VERSION_SECURITY_PROFILE_ORIGIN = Version.V_8_3_0; - public static final NodeFeature SECURITY_PROFILE_ORIGIN_FEATURE = new NodeFeature("security.security_profile_origin"); public static final NodeFeature SECURITY_MIGRATION_FRAMEWORK = new NodeFeature("security.migration_framework"); public static final NodeFeature SECURITY_ROLES_METADATA_FLATTENED = new NodeFeature("security.roles_metadata_flattened"); public static final NodeFeature SECURITY_ROLE_MAPPING_CLEANUP = new NodeFeature("security.role_mapping_cleanup"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index f076dc24e5d5b..6da1ddb61f11f 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -36,9 +36,6 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -48,7 +45,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -88,7 +84,6 @@ import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.profile.ProfileDocument.ProfileDocumentUser; import org.elasticsearch.xpack.security.support.SecurityIndexManager; -import org.elasticsearch.xpack.security.support.SecuritySystemIndices; import org.elasticsearch.xpack.security.test.SecurityMocks; import org.hamcrest.Matchers; import org.junit.After; @@ -115,7 +110,6 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.concurrent.ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN; import static org.elasticsearch.xpack.core.security.support.Validation.VALID_NAME_CHARS; import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME; @@ -187,7 +181,6 @@ public class ProfileServiceTests extends ESTestCase { private SecurityIndexManager profileIndex; private ProfileService profileService; Function realmRefLookup; - private boolean useProfileOrigin; @Before public void prepare() { @@ -208,29 +201,11 @@ public void prepare() { when(client.threadPool()).thenReturn(threadPool); when(client.prepareSearch(SECURITY_PROFILE_ALIAS)).thenReturn(new SearchRequestBuilder(client).setIndices(SECURITY_PROFILE_ALIAS)); this.profileIndex = SecurityMocks.mockSecurityIndexManager(SECURITY_PROFILE_ALIAS); - final ClusterService clusterService = mock(ClusterService.class); - final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); - final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); - when(clusterState.nodes()).thenReturn(discoveryNodes); - useProfileOrigin = randomBoolean(); - FeatureService featureService = mock(FeatureService.class); - when(featureService.clusterHasFeature(any(), eq(SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE))).thenReturn( - useProfileOrigin - ); realmRefLookup = realmIdentifier -> null; Realms realms = mock(Realms.class); when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), false, null)); when(realms.getRealmRef(any(RealmConfig.RealmIdentifier.class))).then(args -> realmRefLookup.apply(args.getArgument(0))); - this.profileService = new ProfileService( 
- Settings.EMPTY, - Clock.systemUTC(), - client, - profileIndex, - clusterService, - featureService, - realms - ); + this.profileService = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms); } @After @@ -331,10 +306,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception { final Collection allProfileUids = randomList(1, 5, () -> randomAlphaOfLength(20)); final Collection missingProfileUids = randomSubsetOf(allProfileUids); doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1]; List responses = new ArrayList<>(); for (MultiGetRequest.Item item : multiGetRequest.getItems()) { @@ -397,10 +369,7 @@ public void testGetProfileSubjectsWithMissingUids() throws Exception { public void testGetProfileSubjectWithFailures() throws Exception { final ElasticsearchException mGetException = new ElasticsearchException("mget Exception"); doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(mGetException); return null; @@ -413,10 +382,7 @@ public void testGetProfileSubjectWithFailures() throws Exception { final Collection errorProfileUids = randomSubsetOf(allProfileUids); final Collection missingProfileUids = Sets.difference(Set.copyOf(allProfileUids), Set.copyOf(errorProfileUids)); doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1]; List responses = new ArrayList<>(); for (MultiGetRequest.Item item : multiGetRequest.getItems()) { @@ -504,15 +470,7 @@ public void testLiteralUsernameWillThrowOnDuplicate() throws IOException { final Subject subject = new Subject(AuthenticationTestHelper.randomUser(), AuthenticationTestHelper.randomRealmRef(true)); Realms realms = mock(Realms.class); when(realms.getDomainConfig(anyString())).then(args -> new DomainConfig(args.getArgument(0), Set.of(), true, "suffix")); - final ProfileService service = new ProfileService( - Settings.EMPTY, - Clock.systemUTC(), - client, - profileIndex, - mock(ClusterService.class), - mock(FeatureService.class), - realms - ); + final ProfileService service = new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms); final PlainActionFuture future = new PlainActionFuture<>(); service.maybeIncrementDifferentiatorAndCreateNewProfile( subject, @@ -593,10 +551,7 @@ public void testBuildSearchRequest() { public void testSecurityProfileOrigin() { // Activate profile doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? 
SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); @SuppressWarnings("unchecked") final ActionListener listener = (ActionListener) invocation.getArguments()[2]; var resp = new MultiSearchResponse( @@ -616,10 +571,7 @@ public void testSecurityProfileOrigin() { final RuntimeException expectedException = new RuntimeException("expected"); doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(expectedException); return null; @@ -632,10 +584,7 @@ public void testSecurityProfileOrigin() { // Update doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(expectedException); return null; @@ -647,10 +596,7 @@ public void testSecurityProfileOrigin() { // Suggest doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(expectedException); return null; @@ -675,17 +621,7 @@ public void testActivateProfileWithDifferentUidFormats() throws IOException { return new DomainConfig(domainName, Set.of(), true, "suffix"); } }); - final ProfileService service = spy( - new ProfileService( - Settings.EMPTY, - Clock.systemUTC(), - client, - profileIndex, - mock(ClusterService.class), - mock(FeatureService.class), - realms - ) - ); + final ProfileService service = spy(new ProfileService(Settings.EMPTY, Clock.systemUTC(), client, profileIndex, realms)); doAnswer(invocation -> { @SuppressWarnings("unchecked") @@ -1098,10 +1034,7 @@ public void testProfileSearchForApiKeyOwnerWithoutDomain() throws Exception { MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong()); try { doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? 
SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1]; assertThat(multiSearchRequest.requests(), iterableWithSize(1)); assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class)); @@ -1153,10 +1086,7 @@ public void testProfileSearchForApiKeyOwnerWithDomain() throws Exception { MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong()); try { doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1]; assertThat(multiSearchRequest.requests(), iterableWithSize(1)); assertThat(multiSearchRequest.requests().get(0).source().query(), instanceOf(BoolQueryBuilder.class)); @@ -1218,10 +1148,7 @@ public void testProfileSearchForOwnerOfMultipleApiKeys() throws Exception { MultiSearchResponse emptyMultiSearchResponse = new MultiSearchResponse(responseItems, randomNonNegativeLong()); try { doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1]; // a single search request for a single owner of multiple keys assertThat(multiSearchRequest.requests(), iterableWithSize(1)); @@ -1277,10 +1204,7 @@ public void testProfileSearchErrorForApiKeyOwner() { MultiSearchResponse multiSearchResponseWithError = new MultiSearchResponse(responseItems, randomNonNegativeLong()); try { doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); // a single search request for a single owner of multiple keys MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocation.getArguments()[1]; // 2 search requests for the 2 Api key owners @@ -1402,10 +1326,7 @@ private void mockMultiGetRequest(List sampleDocumentPar private void mockMultiGetRequest(List sampleDocumentParameters, Map errors) { doAnswer(invocation -> { - assertThat( - threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), - equalTo(useProfileOrigin ? 
SECURITY_PROFILE_ORIGIN : SECURITY_ORIGIN) - ); + assertThat(threadPool.getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME), equalTo(SECURITY_PROFILE_ORIGIN)); final MultiGetRequest multiGetRequest = (MultiGetRequest) invocation.getArguments()[1]; @SuppressWarnings("unchecked") final ActionListener listener = (ActionListener) invocation.getArguments()[2]; diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 192807d667abb..cc01d5b101106 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -130,7 +130,6 @@ public Collection createComponents(PluginServices services) { SnapshotLifecycleTemplateRegistry templateRegistry = new SnapshotLifecycleTemplateRegistry( settings, clusterService, - services.featureService(), threadPool, client, services.xContentRegistry() diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java index 96b962f70a1b6..274dec75865a8 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleFeatures.java @@ -7,12 +7,9 @@ package org.elasticsearch.xpack.slm; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; -import java.util.Map; import java.util.Set; public class SnapshotLifecycleFeatures implements FeatureSpecification { @@ -20,9 +17,4 @@ public class SnapshotLifecycleFeatures implements FeatureSpecification { public Set getFeatures() { return Set.of(SnapshotLifecycleService.INTERVAL_SCHEDULE); } - - @Override - public Map getHistoricalFeatures() { - return Map.of(SnapshotLifecycleTemplateRegistry.MANAGED_BY_DATA_STREAM_LIFECYCLE, Version.V_8_12_0); - } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java index f40ea5a56463a..31c624df67813 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java @@ -8,13 +8,10 @@ package org.elasticsearch.xpack.slm.history; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; @@ -47,13 +44,11 @@ public class SnapshotLifecycleTemplateRegistry extends IndexTemplateRegistry { // version 6: manage by data stream lifecycle // version 7: version the index template name 
so we can upgrade existing deployments public static final int INDEX_TEMPLATE_VERSION = 7; - public static final NodeFeature MANAGED_BY_DATA_STREAM_LIFECYCLE = new NodeFeature("slm-history-managed-by-dsl"); public static final String SLM_TEMPLATE_VERSION_VARIABLE = "xpack.slm.template.version"; public static final String SLM_TEMPLATE_NAME = ".slm-history-" + INDEX_TEMPLATE_VERSION; public static final String SLM_POLICY_NAME = "slm-history-ilm-policy"; - private final FeatureService featureService; @Override protected boolean requiresMasterNode() { @@ -65,13 +60,11 @@ protected boolean requiresMasterNode() { public SnapshotLifecycleTemplateRegistry( Settings nodeSettings, ClusterService clusterService, - FeatureService featureService, ThreadPool threadPool, Client client, NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); - this.featureService = featureService; slmHistoryEnabled = SLM_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings); } @@ -122,9 +115,4 @@ public boolean validate(ClusterState state) { boolean allPoliciesPresent = maybePolicies.map(policies -> policies.keySet().containsAll(policyNames)).orElse(false); return allTemplatesPresent && allPoliciesPresent; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - return featureService.clusterHasFeature(event.state(), MANAGED_BY_DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java index d5a8faea1c0a0..8f25a4e70388e 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -48,7 +47,6 @@ import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import org.elasticsearch.xpack.slm.SnapshotLifecycleFeatures; import org.junit.After; import org.junit.Before; @@ -102,14 +100,7 @@ public void createRegistryAndClient() { ) ); xContentRegistry = new NamedXContentRegistry(entries); - registry = new SnapshotLifecycleTemplateRegistry( - Settings.EMPTY, - clusterService, - new FeatureService(List.of(new SnapshotLifecycleFeatures())), - threadPool, - client, - xContentRegistry - ); + registry = new SnapshotLifecycleTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); } @After @@ -124,7 +115,6 @@ public void testDisabledDoesNotAddTemplates() { SnapshotLifecycleTemplateRegistry disabledRegistry = new SnapshotLifecycleTemplateRegistry( settings, clusterService, - new FeatureService(List.of(new SnapshotLifecycleFeatures())), threadPool, client, xContentRegistry diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java 
b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index b2dc04c1178e4..c89a8237d40b7 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -10,12 +10,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -35,7 +33,6 @@ import java.util.Map; import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_ENABLED; -import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_FEATURE; @Deprecated(since = "8.12.0", forRemoval = true) public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { @@ -48,7 +45,6 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; private final ClusterService clusterService; - private final FeatureService featureService; private volatile boolean stackTemplateEnabled; private static final Map ADDITIONAL_TEMPLATE_VARIABLES = Map.of("xpack.stack.template.deprecated", "true"); @@ -95,12 +91,10 @@ public LegacyStackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); } @@ -282,12 +276,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only once all nodes are updated to 8.9.0. 
- // This is necessary to prevent an error caused by the usage of the ignore_missing_pipeline property - // in the pipeline processor, which has been introduced only in 8.9.0 - return featureService.clusterHasFeature(event.state(), STACK_TEMPLATES_FEATURE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java index 71d01798323d3..73c18a3cc2619 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java @@ -33,8 +33,7 @@ public Collection createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); legacyStackTemplateRegistry.initialize(); StackTemplateRegistry stackTemplateRegistry = new StackTemplateRegistry( @@ -42,8 +41,7 @@ public Collection createComponents(PluginServices services) { services.clusterService(), services.threadPool(), services.client(), - services.xContentRegistry(), - services.featureService() + services.xContentRegistry() ); stackTemplateRegistry.initialize(); return List.of(legacyStackTemplateRegistry, stackTemplateRegistry); diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index ce1b664a46887..aeb9bf2bfa5cb 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -10,14 +10,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -38,13 +35,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); - // Historical node feature kept here as LegacyStackTemplateRegistry is deprecated - public static final NodeFeature STACK_TEMPLATES_FEATURE = new NodeFeature("stack.templates_supported"); - - // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a - // dependency to the data-streams module just for this - public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); - // The stack template registry version. This number must be incremented when we make changes // to built-in templates.
public static final int REGISTRY_VERSION = 14; @@ -58,7 +48,6 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { ); private final ClusterService clusterService; - private final FeatureService featureService; private final Map componentTemplateConfigs; private volatile boolean stackTemplateEnabled; @@ -121,12 +110,10 @@ public StackTemplateRegistry( ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { super(nodeSettings, clusterService, threadPool, client, xContentRegistry); this.clusterService = clusterService; - this.featureService = featureService; this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); this.componentTemplateConfigs = loadComponentTemplateConfigs(); } @@ -355,11 +342,4 @@ protected boolean requiresMasterNode() { // there and the ActionNotFoundTransportException errors are then prevented. return true; } - - @Override - protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only after versions that support data stream lifecycle - // due to .kibana-reporting making use of the feature - return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); - } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java deleted file mode 100644 index 7b05231fcfd15..0000000000000 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplatesFeatures.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.stack; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class StackTemplatesFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - return Map.of(StackTemplateRegistry.STACK_TEMPLATES_FEATURE, Version.V_8_9_0); - } -} diff --git a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification deleted file mode 100644 index 30a1498a54725..0000000000000 --- a/x-pack/plugin/stack/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License -# 2.0; you may not use this file except in compliance with the Elastic License -# 2.0. 
-# - -org.elasticsearch.xpack.stack.StackTemplatesFeatures diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java index 39f58e638aa68..b8c64f945db0a 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -25,8 +24,6 @@ import org.junit.After; import org.junit.Before; -import java.util.List; - public class LegacyStackTemplateRegistryTests extends ESTestCase { private LegacyStackTemplateRegistry registry; private ThreadPool threadPool; @@ -36,15 +33,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); Client client = new NoOpClient(threadPool); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - var featureService = new FeatureService(List.of(new StackTemplatesFeatures())); - registry = new LegacyStackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new LegacyStackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java index c1c855867599a..7f674e24658dd 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackRegistryWithNonRequiredTemplates.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.template.IndexTemplateConfig; @@ -24,10 +23,9 @@ class StackRegistryWithNonRequiredTemplates extends StackTemplateRegistry { ClusterService clusterService, ThreadPool threadPool, Client client, - NamedXContentRegistry xContentRegistry, - FeatureService featureService + NamedXContentRegistry xContentRegistry ) { - super(nodeSettings, clusterService, threadPool, client, xContentRegistry, featureService); + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); } @Override diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 25ff3b5311fa2..35e81f6f4c8c7 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ 
b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.stack; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -29,8 +28,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.datastreams.DataStreamFeatures; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.test.ClusterServiceUtils; @@ -71,7 +68,6 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -82,22 +78,13 @@ public class StackTemplateRegistryTests extends ESTestCase { private ClusterService clusterService; private ThreadPool threadPool; private VerifyingClient client; - private FeatureService featureService; @Before public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); clusterService = ClusterServiceUtils.createClusterService(threadPool); - featureService = new FeatureService(List.of(new StackTemplatesFeatures(), new DataStreamFeatures())); - registry = new StackTemplateRegistry( - Settings.EMPTY, - clusterService, - threadPool, - client, - NamedXContentRegistry.EMPTY, - featureService - ); + registry = new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY); } @After @@ -114,8 +101,7 @@ public void testDisabledDoesNotAddIndexTemplates() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComposableTemplateConfigs(), anEmptyMap()); } @@ -127,8 +113,7 @@ public void testDisabledStillAddsComponentTemplatesAndIlmPolicies() { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); assertThat(disabledRegistry.getComponentTemplateConfigs(), not(anEmptyMap())); assertThat( @@ -371,8 +356,7 @@ public void testMissingNonRequiredTemplates() throws Exception { clusterService, threadPool, client, - NamedXContentRegistry.EMPTY, - featureService + NamedXContentRegistry.EMPTY ); DiscoveryNode node = DiscoveryNodeUtils.create("node"); @@ -519,25 +503,6 @@ public void testThatMissingMasterNodeDoesNothing() { registry.clusterChanged(event); } - public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { - DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .localNodeId("updatedNode") - .masterNodeId("updatedNode") - .add(updatedNode) - .add(outdatedNode) - .build(); - - client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen"); - return null; - }); - - ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), nodes); - 
registry.clusterChanged(event); - } - public void testThatTemplatesAreNotDeprecated() { for (ComposableIndexTemplate it : registry.getComposableTemplateConfigs().values()) { assertFalse(it.isDeprecated()); @@ -555,11 +520,6 @@ public void testThatTemplatesAreNotDeprecated() { .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } - public void testDataStreamLifecycleNodeFeatureId() { - // let's make sure these ids remain in-sync - assertThat(StackTemplateRegistry.DATA_STREAM_LIFECYCLE.id(), is(DataStreamFeatures.DATA_STREAM_LIFECYCLE.id())); - } - // ------------- /** From d6cc86aa7c5098ed401f0b5c8b159ccff2d8715f Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 18 Nov 2024 14:40:15 +0000 Subject: [PATCH 010/386] Improve message about insecure S3 settings (#116915) Clarifies that insecure settings are stored in plaintext and must not be used. Also removes the mention of the (wrong) system property from the error message if insecure settings are not permitted. --- docs/changelog/116915.yaml | 5 +++++ .../repositories/s3/S3Repository.java | 8 ++++++-- .../s3/RepositoryCredentialsTests.java | 15 +++++---------- .../org/elasticsearch/common/ReferenceDocs.java | 1 + .../common/settings/SecureSetting.java | 4 +--- .../elasticsearch/common/reference-docs-links.txt | 1 + 6 files changed, 19 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/116915.yaml diff --git a/docs/changelog/116915.yaml b/docs/changelog/116915.yaml new file mode 100644 index 0000000000000..9686f0023a14a --- /dev/null +++ b/docs/changelog/116915.yaml @@ -0,0 +1,5 @@ +pr: 116915 +summary: Improve message about insecure S3 settings +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index fde15d5d6e6bc..591350c34ab85 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -318,8 +318,7 @@ class S3Repository extends MeteredBlobStoreRepository { deprecationLogger.critical( DeprecationCategory.SECURITY, "s3_repository_secret_settings", - "Using s3 access/secret key from repository settings. Instead " - + "store these in named clients and the elasticsearch keystore for secure settings." + INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } @@ -336,6 +335,11 @@ class S3Repository extends MeteredBlobStoreRepository { ); } + static final String INSECURE_CREDENTIALS_DEPRECATION_WARNING = Strings.format(""" + This repository's settings include a S3 access key and secret key, but repository settings are stored in plaintext and must not be \ + used for security-sensitive information. Instead, store all secure settings in the keystore. 
See [%s] for more information.\ + """, ReferenceDocs.SECURE_SETTINGS); + private static Map buildLocation(RepositoryMetadata metadata) { return Map.of("base_path", BASE_PATH_SETTING.get(metadata.settings()), "bucket", BUCKET_SETTING.get(metadata.settings())); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 52fe152ba41e3..8e5f6634372db 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -107,10 +107,9 @@ public void testRepositoryCredentialsOverrideSecureCredentials() { assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); assertCriticalWarnings( + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.", "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.", - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings.", - "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release." + S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } @@ -194,10 +193,9 @@ public void testReinitSecureCredentials() { if (hasInsecureSettings) { assertCriticalWarnings( + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release.", "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release.", - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings.", - "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release." + S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING ); } } @@ -238,10 +236,7 @@ public void sendResponse(RestResponse response) { throw error.get(); } - assertWarnings( - "Using s3 access/secret key from repository settings. Instead store these in named clients and" - + " the elasticsearch keystore for secure settings." 
- );
+ assertWarnings(S3Repository.INSECURE_CREDENTIALS_DEPRECATION_WARNING); } private void createRepository(final String name, final Settings repositorySettings) { diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 926056fec3ec8..c0fe0bc32fb08 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -82,6 +82,7 @@ public enum ReferenceDocs { CIRCUIT_BREAKER_ERRORS, ALLOCATION_EXPLAIN_NO_COPIES, ALLOCATION_EXPLAIN_MAX_RETRY, + SECURE_SETTINGS, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 36ca2df08724d..3d4f0d2d9dbf7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -185,9 +185,7 @@ private InsecureStringSetting(String name) { @Override public SecureString get(Settings settings) { if (ALLOW_INSECURE_SETTINGS == false && exists(settings)) { - throw new IllegalArgumentException( - "Setting [" + name + "] is insecure, " + "but property [allow_insecure_settings] is not set" - ); + throw new IllegalArgumentException("Setting [" + name + "] is insecure, use the elasticsearch keystore instead"); } return super.get(settings); } diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index f9a8237d63717..69aa5102dec8d 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -44,3 +44,4 @@ FORMING_SINGLE_NODE_CLUSTERS modules-discover CIRCUIT_BREAKER_ERRORS circuit-breaker-errors.html ALLOCATION_EXPLAIN_NO_COPIES cluster-allocation-explain.html#no-valid-shard-copy ALLOCATION_EXPLAIN_MAX_RETRY cluster-allocation-explain.html#maximum-number-of-retries-exceeded +SECURE_SETTINGS secure-settings.html From 5d9385f1ca2487e29a1f5a44148a460b8ac5e1ec Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 19 Nov 2024 01:59:46 +1100 Subject: [PATCH 011/386] [Test] Flush master queue before checking snapshots (#116938) The block-on-data-node returns once the data node begins to process the cluster state update for the new snapshot. This is before the master can see the changes. In edge cases, the listener may be completed too early, before the master can see the new snapshot. This PR flushes the master queue to ensure the snapshot is visible.
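For reference, a minimal sketch of the ordering this change relies on in the test (helper names are taken from the SnapshotShutdownIT diff below; anything beyond that, such as the listener's Void type parameter, is assumed):

    // Sketch only: not the full test, just the ordering the fix enforces.
    final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class);
    // Returns as soon as the data node starts processing the snapshot's cluster state update,
    // which may be before the master itself has applied (and thus exposed) that state.
    final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode);
    // Drain the master's pending cluster state tasks so the new SnapshotsInProgress entry is
    // guaranteed to be visible on the master before any state listeners are installed.
    safeAwait((ActionListener<Void> l) -> flushMasterQueue(clusterService, l));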
Resolves: #116730 --- muted-tests.yml | 3 --- .../java/org/elasticsearch/snapshots/SnapshotShutdownIT.java | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 625813642eb60..4382b133522c6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -209,9 +209,6 @@ tests: - class: org.elasticsearch.reservedstate.service.RepositoriesFileSettingsIT method: testSettingsApplied issue: https://github.com/elastic/elasticsearch/issues/116694 -- class: org.elasticsearch.snapshots.SnapshotShutdownIT - method: testRestartNodeDuringSnapshot - issue: https://github.com/elastic/elasticsearch/issues/116730 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryGroupsResolverTests issue: https://github.com/elastic/elasticsearch/issues/116182 - class: org.elasticsearch.xpack.test.rest.XPackRestIT diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java index 980ef2a87c9c2..e5e641bfdda21 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java @@ -109,6 +109,7 @@ public void testRestartNodeDuringSnapshot() throws Exception { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode); + safeAwait((ActionListener l) -> flushMasterQueue(clusterService, l)); final var snapshotCompletesWithoutPausingListener = ClusterServiceUtils.addTemporaryStateListener(clusterService, state -> { final var entriesForRepo = SnapshotsInProgress.get(state).forRepo(repoName); if (entriesForRepo.isEmpty()) { From e019fc03e0bd951c9a210e06a9c13116175f0760 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cau=C3=AA=20Marcondes?= <55978943+cauemarcondes@users.noreply.github.com> Date: Mon, 18 Nov 2024 15:28:30 +0000 Subject: [PATCH 012/386] Remove apm_user role (#116712) Co-authored-by: Elastic Machine --- .../authorization/built-in-roles.asciidoc | 5 -- .../authz/store/ReservedRolesStore.java | 61 -------------- .../authz/store/ReservedRolesStoreTests.java | 83 ------------------- 3 files changed, 149 deletions(-) diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index 6db08b307f193..d730587e7db17 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -14,11 +14,6 @@ roles have a fixed set of privileges and cannot be updated. Grants access necessary for the APM system user to send system-level data (such as monitoring) to {es}. -[[built-in-roles-apm-user]] `apm_user` :: -Grants the privileges required for APM users (such as `read` and -`view_index_metadata` privileges on the `apm-*` and `.ml-anomalies*` indices). -deprecated:[7.13.0,"See {kibana-ref}/apm-app-users.html[APM app users and privileges\] for alternatives."]. - [[built-in-roles-beats-admin]] `beats_admin` :: Grants access to the `.management-beats` index, which contains configuration information for the Beats. 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 2380c13e147d5..fc14ec6811014 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -402,67 +402,6 @@ private static Map initializeReservedRoles() { "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n" ) ), - entry( - "apm_user", - new RoleDescriptor( - "apm_user", - null, - new RoleDescriptor.IndicesPrivileges[] { - // Self managed APM Server - // Can be removed in 8.0 - RoleDescriptor.IndicesPrivileges.builder().indices("apm-*").privileges("read", "view_index_metadata").build(), - - // APM Server under fleet (data streams) - RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm.*").privileges("read", "view_index_metadata").build(), - RoleDescriptor.IndicesPrivileges.builder().indices("logs-apm-*").privileges("read", "view_index_metadata").build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("metrics-apm.*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("metrics-apm-*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("traces-apm.*") - .privileges("read", "view_index_metadata") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("traces-apm-*") - .privileges("read", "view_index_metadata") - .build(), - - // Machine Learning indices. Only needed for legacy reasons - // Can be removed in 8.0 - RoleDescriptor.IndicesPrivileges.builder() - .indices(".ml-anomalies*") - .privileges("read", "view_index_metadata") - .build(), - - // Annotations - RoleDescriptor.IndicesPrivileges.builder() - .indices("observability-annotations") - .privileges("read", "view_index_metadata") - .build() }, - new RoleDescriptor.ApplicationResourcePrivileges[] { - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana-*") - .resources("*") - .privileges("reserved_ml_apm_user") - .build() }, - null, - null, - MetadataUtils.getDeprecatedReservedMetadata( - "This role will be removed in a future major release. Please use editor and viewer roles instead" - ), - null, - null, - null, - null, - "Grants the privileges required for APM users (such as read and view_index_metadata privileges " - + "on the apm-* and .ml-anomalies* indices)." 
- ) - ), entry( "inference_admin", new RoleDescriptor( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index fb4d822b7655c..9818a890d465f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -3058,89 +3058,6 @@ public void testAPMSystemRole() { assertNoAccessAllowed(APMSystemRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); } - public void testAPMUserRole() { - final TransportRequest request = mock(TransportRequest.class); - final Authentication authentication = AuthenticationTestHelper.builder().build(); - - final RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("apm_user"); - assertNotNull(roleDescriptor); - assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); - - final String allowedApplicationActionPattern = "example/custom/action/*"; - final String kibanaApplicationWithRandomIndex = "kibana-" + randomFrom(randomAlphaOfLengthBetween(8, 24), ".kibana"); - Role role = Role.buildFromRoleDescriptor( - roleDescriptor, - new FieldPermissionsCache(Settings.EMPTY), - RESTRICTED_INDICES, - List.of( - new ApplicationPrivilegeDescriptor( - kibanaApplicationWithRandomIndex, - "reserved_ml_apm_user", - Set.of(allowedApplicationActionPattern), - Map.of() - ) - ) - ); - - assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); - assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); - - assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, "foo-apm"); - assertNoAccessAllowed(role, "foo-logs-apm.bar"); - assertNoAccessAllowed(role, "foo-logs-apm-bar"); - assertNoAccessAllowed(role, "foo-traces-apm.bar"); - assertNoAccessAllowed(role, "foo-traces-apm-bar"); - assertNoAccessAllowed(role, "foo-metrics-apm.bar"); - assertNoAccessAllowed(role, "foo-metrics-apm-bar"); - - assertOnlyReadAllowed(role, "logs-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "logs-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm." + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "traces-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm." 
+ randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "metrics-apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, "apm-" + randomIntBetween(0, 5)); - assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); - - assertOnlyReadAllowed(role, "observability-annotations"); - - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(kibanaApplicationWithRandomIndex, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - kibanaApplicationWithRandomIndex, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(true) - ); - - final String otherApplication = "logstash-" + randomAlphaOfLengthBetween(8, 24); - assertThat( - role.application().grants(ApplicationPrivilegeTests.createPrivilege(otherApplication, "app-foo", "foo"), "*"), - is(false) - ); - assertThat( - role.application() - .grants( - ApplicationPrivilegeTests.createPrivilege( - otherApplication, - "app-reserved_ml_apm_user", - allowedApplicationActionPattern - ), - "*" - ), - is(false) - ); - } - public void testMachineLearningAdminRole() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = AuthenticationTestHelper.builder().build(); From 8f6fe646b645196973d13b1eb8ab4a2be1b0ac32 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 18 Nov 2024 15:30:58 +0000 Subject: [PATCH 013/386] Fix handling of bulk requests with semantic text fields and delete ops (#116942) Previously, delete operations were not processed correctly when followed by operations containing semantic text fields. This issue caused the positions of subsequent operations in the items array to shift incorrectly by one. This PR resolves the discrepancy and includes additional tests to ensure proper behavior. 
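The root cause can be distilled as follows; this is a simplified, hypothetical illustration (items, the DeleteRequest check, and processItem are stand-ins, not the actual filter code), contrasting a hand-maintained counter with a loop-scoped index:

    // Buggy shape: the delete branch skips the increment at the bottom of the
    // loop, so every subsequent item is attributed to the wrong array position.
    int itemIndex = 0;
    for (var item : items) {
        if (item.request() instanceof DeleteRequest) {
            continue; // itemIndex now lags the real position by one
        }
        processItem(itemIndex, item);
        itemIndex++;
    }

    // Fixed shape: the loop header owns the index, so early continues can no
    // longer desynchronize it from the item's actual slot.
    for (int itemIndex = 0; itemIndex < items.length; itemIndex++) {
        var item = items[itemIndex];
        if (item.request() instanceof DeleteRequest) {
            continue;
        }
        processItem(itemIndex, item);
    }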
--- docs/changelog/116942.yaml | 5 ++ .../ShardBulkInferenceActionFilterIT.java | 23 +++++--- .../xpack/inference/InferenceFeatures.java | 3 +- .../ShardBulkInferenceActionFilter.java | 6 +-- .../mapper/SemanticTextFieldMapper.java | 2 +- .../inference/30_semantic_text_inference.yml | 52 +++++++++++++++++++ 6 files changed, 80 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/116942.yaml diff --git a/docs/changelog/116942.yaml b/docs/changelog/116942.yaml new file mode 100644 index 0000000000000..5037e8c59cd85 --- /dev/null +++ b/docs/changelog/116942.yaml @@ -0,0 +1,5 @@ +pr: 116942 +summary: Fix handling of bulk requests with semantic text fields and delete ops +area: Relevance +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 54d83af8f5d95..3b0fc869c8124 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -30,8 +31,10 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Locale; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; import static org.hamcrest.Matchers.equalTo; @@ -87,30 +90,38 @@ public void testBulkOperations() throws Exception { int totalBulkReqs = randomIntBetween(2, 100); long totalDocs = 0; + Set ids = new HashSet<>(); for (int bulkReqs = 0; bulkReqs < totalBulkReqs; bulkReqs++) { BulkRequestBuilder bulkReqBuilder = client().prepareBulk(); int totalBulkSize = randomIntBetween(1, 100); for (int bulkSize = 0; bulkSize < totalBulkSize; bulkSize++) { - String id = Long.toString(totalDocs); + if (ids.size() > 0 && rarely(random())) { + String id = randomFrom(ids); + ids.remove(id); + DeleteRequestBuilder request = new DeleteRequestBuilder(client(), INDEX_NAME).setId(id); + bulkReqBuilder.add(request); + continue; + } + String id = Long.toString(totalDocs++); boolean isIndexRequest = randomBoolean(); Map source = new HashMap<>(); source.put("sparse_field", isIndexRequest && rarely() ? null : randomSemanticTextInput()); source.put("dense_field", isIndexRequest && rarely() ? 
null : randomSemanticTextInput()); if (isIndexRequest) { bulkReqBuilder.add(new IndexRequestBuilder(client()).setIndex(INDEX_NAME).setId(id).setSource(source)); - totalDocs++; + ids.add(id); } else { boolean isUpsert = randomBoolean(); UpdateRequestBuilder request = new UpdateRequestBuilder(client()).setIndex(INDEX_NAME).setDoc(source); - if (isUpsert || totalDocs == 0) { + if (isUpsert || ids.size() == 0) { request.setDocAsUpsert(true); - totalDocs++; } else { // Update already existing document - id = Long.toString(randomLongBetween(0, totalDocs - 1)); + id = randomFrom(ids); } request.setId(id); bulkReqBuilder.add(request); + ids.add(id); } } BulkResponse bulkResponse = bulkReqBuilder.get(); @@ -135,7 +146,7 @@ public void testBulkOperations() throws Exception { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) ids.size())); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index ad89cba945143..d9d1a87e714a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -37,7 +37,8 @@ public Set getFeatures() { public Set getTestFeatures() { return Set.of( SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index b3bbe3a7df9bc..dd59230e575c4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -413,8 +413,8 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons */ private Map> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { Map> fieldRequestsMap = new LinkedHashMap<>(); - int itemIndex = 0; - for (var item : bulkShardRequest.items()) { + for (int itemIndex = 0; itemIndex < bulkShardRequest.items().length; itemIndex++) { + var item = bulkShardRequest.items()[itemIndex]; if (item.getPrimaryResponse() != null) { // item was already aborted/processed by a filter in the chain upstream (e.g. 
security) continue; @@ -441,6 +441,7 @@ private Map> createFieldInferenceRequests(Bu // ignore delete request continue; } + final Map docMap = indexRequest.sourceAsMap(); for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); @@ -483,7 +484,6 @@ private Map> createFieldInferenceRequests(Bu } } } - itemIndex++; } return fieldRequestsMap; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 890856d0b6e80..2a9fcfed49d2f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -89,8 +89,8 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2"); public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); - public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); + public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 71c9e5a23aea1..534e4831c4a0a 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -624,3 +624,55 @@ setup: - match: { _source.level_1.dense_field.text: "another inference test" } - exists: _source.level_1.dense_field.inference.chunks.0.embeddings - match: { _source.level_1.dense_field.inference.chunks.0.text: "another inference test" } + +--- +"Deletes on bulk operation": + - requires: + cluster_features: semantic_text.delete_fix + reason: Delete operations are properly applied when subsequent operations include a semantic text field. 
+ + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{"_id": "1"}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{"_id": "2"}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: ["you know, for testing", "now with chunks"] } + - match: { hits.hits.1._source.dense_field.text: ["some more tests", "that include chunks"] } + + - do: + bulk: + index: test-index + refresh: true + body: | + {"delete":{ "_id": "2"}} + {"update":{"_id": "1"}} + {"doc":{"dense_field": "updated text"}} + + - do: + search: + index: test-index + body: + query: + semantic: + field: dense_field + query: "you know, for testing" + + - match: { hits.total.value: 1 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0._source.dense_field.text: "updated text" } From 41121ac4587bc09003ffabd840462dbdbdcb633c Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Mon, 18 Nov 2024 16:43:13 +0100 Subject: [PATCH 014/386] Skip test with per agg WHERE on older nodes (#116949) This is technically not needed because on main/9.0 and 8.x/8.17 the nodes have the capability to perform `STATS COUNT() WHERE ...` (no pipe) - but let's add it anyway to avoid confusion, stay in line with 8.x, and avoid issues if we later perform this test in CCQ scenarios. Relates https://github.com/elastic/elasticsearch/pull/116947. --- .../esql/qa/testFixtures/src/main/resources/stats.csv-spec | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index ad9de4674f8e1..7a046786a4f19 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2468,6 +2468,7 @@ count:long |values:keyword |job_positions:keyword ; prunedStatsFollowedByStats +required_capability: per_agg_filtering from employees | eval my_length = length(concat(first_name, null)) | stats count = count(my_length) where false, From cc35f1dc6a122768cb964c3e2a11c45a512afa1a Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 18 Nov 2024 16:19:14 +0000 Subject: [PATCH 015/386] Remove transport versions fixup listener and associated code (#116941) --- .../upgrades/QueryBuilderBWCIT.java | 19 +- server/src/main/java/module-info.java | 1 - .../cluster/service/TransportFeatures.java | 25 -- .../TransportVersionsFixupListener.java | 229 ------------- .../elasticsearch/node/NodeConstruction.java | 4 - ...lasticsearch.features.FeatureSpecification | 1 - .../TransportVersionsFixupListenerTests.java | 313 ------------------ .../test/rest/RestTestLegacyFeatures.java | 12 - ...TransportVersionClusterStateUpgradeIT.java | 171 ---------- 9 files changed, 1 insertion(+), 774 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java delete mode 100644 server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java delete mode 100644 server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java delete mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java diff --git
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 9ca420efe1156..aac2c661dea9f 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -12,8 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -23,7 +21,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; @@ -43,7 +40,6 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.ClassRule; @@ -249,23 +245,10 @@ public void testQueryBuilderBWC() throws Exception { InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length); StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // condition will always be true - var originalClusterHasTransportVersion = oldClusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED); - final TransportVersion transportVersion; - if (originalClusterHasTransportVersion == false) { - transportVersion = TransportVersion.fromId( - parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) - ); - } else { - transportVersion = TransportVersion.readVersion(input); - } - - input.setTransportVersion(transportVersion); + input.setTransportVersion(TransportVersion.readVersion(input)); QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class); assert in.read() == -1; assertEquals(expectedQueryBuilder, queryBuilder); - } } } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 08794f5938bc2..35d1a44624b0f 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -423,7 +423,6 @@ org.elasticsearch.action.bulk.BulkFeatures, org.elasticsearch.features.FeatureInfrastructureFeatures, org.elasticsearch.health.HealthFeatures, - org.elasticsearch.cluster.service.TransportFeatures, org.elasticsearch.cluster.metadata.MetadataFeatures, org.elasticsearch.rest.RestFeatures, org.elasticsearch.repositories.RepositoriesFeatures, diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java deleted file mode 100644 index 6e0a8afd6cf8e..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportFeatures.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -public class TransportFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - // transport version was introduced in 8.8.0, but we need to wait until all nodes are >8.8.0 - // to properly detect when we need to fix transport versions - return Map.of(TransportVersionsFixupListener.FIX_TRANSPORT_VERSION, Version.V_8_8_1); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java b/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java deleted file mode 100644 index 0ae0f8b10aed7..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/service/TransportVersionsFixupListener.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.stream.Collectors; - -import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; - -/** - * This fixes up the transport version from pre-8.8.0 cluster state that was inferred as the minimum possible, - * due to the master node not understanding cluster state with transport versions added in 8.8.0. - * Any nodes with the inferred placeholder cluster state is then refreshed with their actual transport version - */ -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9 -public class TransportVersionsFixupListener implements ClusterStateListener { - - private static final Logger logger = LogManager.getLogger(TransportVersionsFixupListener.class); - - static final NodeFeature FIX_TRANSPORT_VERSION = new NodeFeature("transport.fix_transport_version"); - - private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30); - - private final MasterServiceTaskQueue taskQueue; - private final ClusterAdminClient client; - private final Scheduler scheduler; - private final Executor executor; - private final Set pendingNodes = Collections.synchronizedSet(new HashSet<>()); - private final FeatureService featureService; - - public TransportVersionsFixupListener( - ClusterService service, - ClusterAdminClient client, - FeatureService featureService, - ThreadPool threadPool - ) { - // there tends to be a lot of state operations on an upgrade - this one is not time-critical, - // so use LOW priority. It just needs to be run at some point after upgrade. 
- this( - service.createTaskQueue("fixup-transport-versions", Priority.LOW, new TransportVersionUpdater()), - client, - featureService, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - TransportVersionsFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - FeatureService featureService, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.featureService = featureService; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodeTransportVersionTask implements ClusterStateTaskListener { - private final Map results; - private final int retryNum; - - NodeTransportVersionTask(Map results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply transport version for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map results() { - return results; - } - } - - private static class TransportVersionUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) throws Exception { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // this node's transport version might have been updated already/node has gone away - var cvMap = builder.compatibilityVersions(); - TransportVersion recordedTv = Optional.ofNullable(cvMap.get(e.getKey())) - .map(CompatibilityVersions::transportVersion) - .orElse(null); - assert (recordedTv != null) || (context.initialState().nodes().nodeExists(e.getKey()) == false) - : "Node " + e.getKey() + " is in the cluster but does not have an associated transport version recorded"; - if (Objects.equals(recordedTv, INFERRED_TRANSPORT_VERSION)) { - builder.putCompatibilityVersions(e.getKey(), e.getValue(), Map.of()); // unknown mappings versions - modified = true; - } - } - c.success(() -> {}); - } - return modified ? 
builder.build() : context.initialState(); - } - } - - @SuppressForbidden(reason = "maintaining ClusterState#compatibilityVersions requires reading them") - private static Map getCompatibilityVersions(ClusterState clusterState) { - return clusterState.compatibilityVersions(); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.localNodeMaster() == false) return; // only if we're master - - // if the min node version > 8.8.0, and the cluster state has some transport versions == 8.8.0, - // then refresh all inferred transport versions to their real versions - // now that everything should understand cluster state with transport versions - if (featureService.clusterHasFeature(event.state(), FIX_TRANSPORT_VERSION) - && event.state().getMinTransportVersion().equals(INFERRED_TRANSPORT_VERSION)) { - - // find all the relevant nodes - Set nodes = getCompatibilityVersions(event.state()).entrySet() - .stream() - .filter(e -> e.getValue().transportVersion().equals(INFERRED_TRANSPORT_VERSION)) - .map(Map.Entry::getKey) - .collect(Collectors.toSet()); - - updateTransportVersions(nodes, 0); - } - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> updateTransportVersions(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void updateTransportVersions(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesInfoRequest request = new NodesInfoRequest(outstandingNodes.toArray(String[]::new)); - request.clear(); // only requesting base data - client.nodesInfo(request, new ActionListener<>() { - @Override - public void onResponse(NodesInfoResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read transport versions for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesInfoResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read transport version info from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeInfo::getTransportVersion)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("update-transport-version", new NodeTransportVersionTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index e8b9d18a1dd08..62f923d673dc7 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -62,7 +62,6 @@ import org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor; 
import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.TransportVersionsFixupListener; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.component.LifecycleComponent; @@ -788,9 +787,6 @@ private void construct( if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMappingUpdateService(systemIndices, client)); - clusterService.addListener( - new TransportVersionsFixupListener(clusterService, client.admin().cluster(), featureService, threadPool) - ); clusterService.addListener(new NodeFeaturesFixupListener(clusterService, client.admin().cluster(), threadPool)); } diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 089c0231bc593..3955fc87bf392 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -11,7 +11,6 @@ org.elasticsearch.action.admin.indices.stats.IndicesStatsFeatures org.elasticsearch.action.bulk.BulkFeatures org.elasticsearch.features.FeatureInfrastructureFeatures org.elasticsearch.health.HealthFeatures -org.elasticsearch.cluster.service.TransportFeatures org.elasticsearch.cluster.metadata.MetadataFeatures org.elasticsearch.rest.RestFeatures org.elasticsearch.repositories.RepositoriesFeatures diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java deleted file mode 100644 index 9eec8309bbb83..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/service/TransportVersionsFixupListenerTests.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.cluster.service; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.TransportVersionsFixupListener.NodeTransportVersionTask; -import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.features.FeatureService; -import org.elasticsearch.indices.SystemIndexDescriptor; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.mockito.ArgumentCaptor; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; - -import static java.util.Map.entry; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class TransportVersionsFixupListenerTests extends ESTestCase { - - private static final Version NEXT_VERSION = Version.V_8_8_1; - private static final TransportVersion NEXT_TRANSPORT_VERSION = TransportVersion.fromId(NEXT_VERSION.id); - - @SuppressWarnings("unchecked") - private static MasterServiceTaskQueue newMockTaskQueue() { - return mock(MasterServiceTaskQueue.class); - } - - private static DiscoveryNodes node(Version... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i])); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - @SafeVarargs - private static Map versions(T... 
versions) { - Map tvs = new HashMap<>(); - for (int i = 0; i < versions.length; i++) { - tvs.put("node" + i, versions[i]); - } - return tvs; - } - - private static NodesInfoResponse getResponse(Map responseData) { - return new NodesInfoResponse( - ClusterName.DEFAULT, - responseData.entrySet() - .stream() - .map( - e -> new NodeInfo( - "", - e.getValue(), - null, - null, - null, - DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)), - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ) - ) - .toList(), - List.of() - ); - } - - public void testNothingFixedWhenNothingToInfer() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnNextVersion() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION)) - .nodeIdsToCompatibilityVersions(versions(new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of()))) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testNothingFixedWhenOnPreviousVersion() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.V_8_7_0, Version.V_8_8_0)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(TransportVersions.V_8_7_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - @SuppressWarnings("unchecked") - public void testVersionsAreFixed() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - 
) - ) - .build(); - - ArgumentCaptor> action = ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor task = ArgumentCaptor.forClass(NodeTransportVersionTask.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - action.getValue() - .onResponse( - getResponse( - Map.ofEntries( - entry("node1", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())), - entry("node2", new CompatibilityVersions(NEXT_TRANSPORT_VERSION, Map.of())) - ) - ) - ); - verify(taskQueue).submitTask(anyString(), task.capture(), any()); - - assertThat(task.getValue().results().keySet(), equalTo(Set.of("node1", "node2"))); - assertThat(task.getValue().results().values(), everyItem(equalTo(NEXT_TRANSPORT_VERSION))); - } - - public void testConcurrentChangesDoNotOverlap() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - null, - null - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client).nodesInfo(argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), any()); - // don't send back the response yet - - ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(NEXT_VERSION, NEXT_VERSION, NEXT_VERSION)) - .nodeIdsToCompatibilityVersions( - Maps.transformValues( - versions(NEXT_TRANSPORT_VERSION, NEXT_TRANSPORT_VERSION, TransportVersions.V_8_8_0), - transportVersion -> new CompatibilityVersions(transportVersion, Map.of()) - ) - ) - .build(); - // should not send any requests - listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1)); - verifyNoMoreInteractions(client); - } - - @SuppressWarnings("unchecked") - public void testFailedRequestsAreRetried() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Scheduler scheduler = mock(Scheduler.class); - Executor executor = mock(Executor.class); - - var compatibilityVersions = new CompatibilityVersions( - TransportVersion.current(), - Map.of(".system-index-1", new SystemIndexDescriptor.MappingsVersion(1, 1234)) - ); - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(node(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeIdsToCompatibilityVersions( - Map.ofEntries( - entry("node0", compatibilityVersions), - entry("node1", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())), - entry("node2", new CompatibilityVersions(TransportVersions.V_8_8_0, Map.of())) - ) - ) - .build(); - - ArgumentCaptor> action = 
ArgumentCaptor.forClass(ActionListener.class); - ArgumentCaptor retry = ArgumentCaptor.forClass(Runnable.class); - - TransportVersionsFixupListener listeners = new TransportVersionsFixupListener( - taskQueue, - client, - new FeatureService(List.of(new TransportFeatures())), - scheduler, - executor - ); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client, times(1)).nodesInfo(any(), action.capture()); - // do response immediately - action.getValue().onFailure(new RuntimeException("failure")); - verify(scheduler).schedule(retry.capture(), any(), same(executor)); - - // running retry should cause another check - retry.getValue().run(); - verify(client, times(2)).nodesInfo( - argThat(transformedMatch(NodesInfoRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - any() - ); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index e43aa940a4881..a10394b4156d6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -17,7 +17,6 @@ import java.util.Map; import static java.util.Map.entry; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; /** * This class groups historical features that have been removed from the production codebase, but are still used by the test @@ -30,15 +29,6 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); - /** These are "pure test" features: normally we would not need them, and test for TransportVersion/fallback to Version (see for example - * {@code ESRestTestCase#minimumTransportVersion()}. However, some tests explicitly check and validate the content of a response, so - * we need these features to support them. 
- */ - public static final NodeFeature TRANSPORT_VERSION_SUPPORTED = new NodeFeature("transport_version_supported"); - public static final NodeFeature STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION = new NodeFeature( - "state.transport_version_to_nodes_version" - ); - // Ref: https://github.com/elastic/elasticsearch/pull/86416 public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); @@ -103,8 +93,6 @@ public Map getHistoricalFeatures() { entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), - entry(TRANSPORT_VERSION_SUPPORTED, VERSION_INTRODUCING_TRANSPORT_VERSIONS), - entry(STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION, Version.V_8_11_0), entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java deleted file mode 100644 index e864a579bd0b0..0000000000000 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.upgrades; - -import org.elasticsearch.Build; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.client.Request; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; - -import java.util.Map; - -import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; -import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.everyItem; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.oneOf; - -public class TransportVersionClusterStateUpgradeIT extends AbstractUpgradeTestCase { - - public void testReadsInferredTransportVersions() throws Exception { - // waitUntil because the versions fixup on upgrade happens in the background so may need a retry - assertTrue(waitUntil(() -> { - try { - // check several responses in order to sample from a selection of nodes - for (int i = getClusterHosts().size(); i > 0; i--) { - if (runTransportVersionsTest() == false) { - return false; - } - } - return true; - } catch (Exception e) { - throw new AssertionError(e); - } - })); - } - - private boolean runTransportVersionsTest() throws Exception { - final var clusterState = ObjectPath.createFromResponse( - client().performRequest(new Request("GET", "/_cluster/state" + randomFrom("", "/nodes") + randomFrom("", "?local"))) - ); - final var description = clusterState.toString(); - - final var nodeIds = clusterState.evaluateMapKeys("nodes"); - final Map versionsByNodeId = Maps.newHashMapWithExpectedSize(nodeIds.size()); - for (final var nodeId : nodeIds) { - versionsByNodeId.put(nodeId, clusterState.evaluate("nodes." 
+ nodeId + ".version")); - } - - final var hasTransportVersions = clusterState.evaluate("transport_versions") != null; - final var hasNodesVersions = clusterState.evaluate("nodes_versions") != null; - assertFalse(description, hasNodesVersions && hasTransportVersions); - - switch (CLUSTER_TYPE) { - case OLD -> { - if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { - // Before 8.8.0 there was only DiscoveryNode#version - assertFalse(description, hasTransportVersions); - assertFalse(description, hasNodesVersions); - } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { - // In [8.8.0, 8.11.0) we exposed just transport_versions - assertTrue(description, hasTransportVersions); - assertFalse(description, hasNodesVersions); - } else { - // From 8.11.0 onwards we exposed nodes_versions - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - } - } - case MIXED -> { - if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { - // Responding node might be <8.8.0 (so no extra versions) or >=8.11.0 (includes nodes_versions) - assertFalse(description, hasTransportVersions); - } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { - // Responding node might be in [8.8.0, 8.11.0) (transport_versions) or >=8.11.0 (includes nodes_versions) but not both - assertTrue(description, hasNodesVersions || hasTransportVersions); - } else { - // Responding node is ≥8.11.0 so has nodes_versions for sure - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - } - } - case UPGRADED -> { - // All nodes are Version.CURRENT, ≥8.11.0, so we definitely have nodes_versions - assertFalse(description, hasTransportVersions); - assertTrue(description, hasNodesVersions); - assertThat(description, versionsByNodeId.values(), everyItem(equalTo(Build.current().version()))); - } - } - - if (hasTransportVersions) { - // Upgrading from [8.8.0, 8.11.0) and the responding node is still on the old version - assertFalse(description, clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION)); - assertTrue(description, clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED)); - assertNotEquals(description, ClusterType.UPGRADED, CLUSTER_TYPE); - - // transport_versions includes the correct version for all nodes, no inference is needed - assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("transport_versions")); - for (int i = 0; i < nodeIds.size(); i++) { - final var path = "transport_versions." 
+ i; - final String nodeId = clusterState.evaluate(path + ".node_id"); - final var nodeDescription = nodeId + "/" + description; - final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); - final var nodeVersion = versionsByNodeId.get(nodeId); - assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Build.current().version())) { - assertEquals(nodeDescription, TransportVersion.current(), transportVersion); - } else { - // There's no relationship between node versions and transport versions anymore, although we can be sure of this: - assertThat(nodeDescription, transportVersion, greaterThanOrEqualTo(INFERRED_TRANSPORT_VERSION)); - } - } - } else if (hasNodesVersions) { - // Either upgrading from ≥8.11.0 (the responding node might be old or new), or from <8.8.0 (the responding node is new) - assertFalse( - description, - clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false - && CLUSTER_TYPE == ClusterType.OLD - ); - - // nodes_versions includes _a_ version for all nodes; it might be correct, or it might be inferred if we're upgrading from - // <8.8.0 and the master is still an old node or the TransportVersionsFixupListener hasn't run yet - assertEquals(description, nodeIds.size(), clusterState.evaluateArraySize("nodes_versions")); - for (int i = 0; i < nodeIds.size(); i++) { - final var path = "nodes_versions." + i; - final String nodeId = clusterState.evaluate(path + ".node_id"); - final var nodeDescription = nodeId + "/" + description; - final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); - final var nodeVersion = versionsByNodeId.get(nodeId); - assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Build.current().version())) { - // Either the responding node is upgraded or the upgrade is trivial; if the responding node is upgraded but the master - // is not then its transport version may be temporarily inferred as 8.8.0 until TransportVersionsFixupListener runs. - assertThat( - nodeDescription, - transportVersion, - clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) - ? equalTo(TransportVersion.current()) - : oneOf(TransportVersion.current(), INFERRED_TRANSPORT_VERSION) - ); - if (CLUSTER_TYPE == ClusterType.UPGRADED && transportVersion.equals(INFERRED_TRANSPORT_VERSION)) { - // TransportVersionsFixupListener should run soon, retry - logger.info("{} - not fixed up yet, retrying", nodeDescription); - return false; - } - } else { - var version = parseLegacyVersion(nodeVersion); - // All non-semantic versions are after 8.8.0 and have transport version - var transportVersionIntroduced = version.map(v -> v.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)).orElse(true); - if (transportVersionIntroduced) { - // There's no relationship between node versions and transport versions anymore, although we can be sure of this: - assertThat(nodeDescription, transportVersion, greaterThan(INFERRED_TRANSPORT_VERSION)); - } else { - // Responding node is not upgraded, and no later than 8.8.0, so we infer its version correctly. 
- assertEquals(nodeDescription, TransportVersion.fromId(version.get().id()), transportVersion); - } - } - } - } - - return true; - } -} From 99689281e01d8c8f67edb727f930ff383febb19f Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 18 Nov 2024 17:36:39 +0100 Subject: [PATCH 016/386] Remove support for deprecated force_source highlighting parameter (#116943) force_source has been parsed as a no-op since 8.8. This commit removes support for it at the REST layer, meaning a search request that provides it now gets an error back. --- docs/changelog/116943.yaml | 11 +++++++++++ .../search/search-your-data/highlighting.asciidoc | 2 -- .../highlight/AbstractHighlighterBuilder.java | 8 -------- .../subphase/highlight/HighlightBuilderTests.java | 11 ----------- 4 files changed, 11 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/116943.yaml diff --git a/docs/changelog/116943.yaml b/docs/changelog/116943.yaml new file mode 100644 index 0000000000000..3fd0793610cdd --- /dev/null +++ b/docs/changelog/116943.yaml @@ -0,0 +1,11 @@ +pr: 116943 +summary: Remove support for deprecated `force_source` highlighting parameter +area: Highlighting +type: breaking +issues: [] +breaking: + title: Remove support for deprecated `force_source` highlighting parameter + area: REST API + details: The deprecated highlighting `force_source` parameter is no longer supported. + impact: Users should remove usages of the `force_source` parameter from their search requests. + notable: false diff --git a/docs/reference/search/search-your-data/highlighting.asciidoc b/docs/reference/search/search-your-data/highlighting.asciidoc index 7ee13d971b035..6a432e6104524 100644 --- a/docs/reference/search/search-your-data/highlighting.asciidoc +++ b/docs/reference/search/search-your-data/highlighting.asciidoc @@ -176,8 +176,6 @@ fragmenter:: Specifies how text should be broken up in highlight snippets: `simple` or `span`. Only valid for the `plain` highlighter. Defaults to `span`. -force_source:: deprecated; this parameter has no effect - `simple`::: Breaks up text into same-sized fragments. `span`::: Breaks up text into same-sized fragments, but tries to avoid breaking up text between highlighted terms.
This is helpful when you're querying for phrases. diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e14177adba467..a8db0f26d2966 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -64,7 +64,6 @@ public abstract class AbstractHighlighterBuilder> BiFunction {}, FORCE_SOURCE_FIELD); // force_source is ignored parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD); parser.declareInt(HB::maxAnalyzedOffset, MAX_ANALYZED_OFFSET_FIELD); parser.declareObject(HB::options, (XContentParser p, Void c) -> { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 3699cdee3912b..d1bbc1ec5910b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -607,17 +607,6 @@ public void testOrderSerialization() throws Exception { } } - public void testForceSourceDeprecation() throws IOException { - String highlightJson = """ - { "fields" : { }, "force_source" : true } - """; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightJson)) { - HighlightBuilder.fromXContent(parser); - } - - assertWarnings("Deprecated field [force_source] used, this field is unused and will be removed entirely"); - } - protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(contentType); if (randomBoolean()) { From c8049531058dcce1a8e924b6e0aebf6502c2cca8 Mon Sep 17 00:00:00 2001 From: Peter Straßer Date: Mon, 18 Nov 2024 17:38:49 +0100 Subject: [PATCH 017/386] Provide access to new settings for HyphenationCompoundWordTokenFilter (#115585) Allow the new flags added in Lucene's HyphenationCompoundWordTokenFilter to be used. Adds access to the two new flags no_sub_matches and no_overlapping_matches.
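For illustration, a filter definition using one of the new settings might look like the following sketch (it mirrors the test fixtures added below; the filter name and word list are only examples):

    "hyphenation_dec_no_sub_matches": {
      "type": "hyphenation_decompounder",
      "hyphenation_patterns_path": "de_DR.xml",
      "word_list": ["kaffee", "fee", "maschine"],
      "no_sub_matches": true
    }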
Lucene issue: https://github.com/apache/lucene/issues/9231 --- docs/changelog/115585.yaml | 6 + ...henation-decompounder-tokenfilter.asciidoc | 12 + ...enationCompoundWordTokenFilterFactory.java | 9 +- .../common/CompoundAnalysisTests.java | 65 +- .../elasticsearch/analysis/common/de_DR.xml | 1130 +++++++++++++++++ .../elasticsearch/analysis/common/test1.json | 59 +- .../elasticsearch/analysis/common/test1.yml | 25 + 7 files changed, 1295 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/115585.yaml create mode 100644 modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml diff --git a/docs/changelog/115585.yaml b/docs/changelog/115585.yaml new file mode 100644 index 0000000000000..02eecfc3d7d2b --- /dev/null +++ b/docs/changelog/115585.yaml @@ -0,0 +1,6 @@ +pr: 115459 +summary: Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter +area: Search +type: enhancement +issues: + - 97849 diff --git a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc index eed66d81e9132..1bd36f801aa17 100644 --- a/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hyphenation-decompounder-tokenfilter.asciidoc @@ -111,6 +111,18 @@ output. Defaults to `5`. (Optional, Boolean) If `true`, only include the longest matching subword. Defaults to `false`. +`no_sub_matches`:: +(Optional, Boolean) +If `true`, do not match sub tokens in tokens that are in the word list. +Defaults to `false`. + +`no_overlapping_matches`:: +(Optional, Boolean) +If `true`, do not allow overlapping tokens. +Defaults to `false`. + +Typically users will only want to include one of the three flags as enabling `no_overlapping_matches` is the most restrictive and `no_sub_matches` is more restrictive than `only_longest_match`. When enabling a more restrictive option the state of the less restrictive does not have any effect. 
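+For example, with the word list [`fuss`, `fussball`, `ballpumpe`, `ball`, `pumpe`] used by the tests in this change, the tokens emitted for `fussballpumpe` include at least the following (a sketch based on the test expectations, not an exhaustive listing):
+
+* `only_longest_match`: `fussballpumpe`, `fussball`, `ballpumpe`, `pumpe`
+* `no_sub_matches`: `fussballpumpe`, `fussball`, `ballpumpe`
+* `no_overlapping_matches`: `fussballpumpe`, `fussball`, `pumpe`
+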
+ [[analysis-hyp-decomp-tokenfilter-customize]] ==== Customize and add to an analyzer diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index b2b7f86ce34e6..e091f0175009e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -28,6 +28,8 @@ */ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundWordTokenFilterFactory { + private final boolean noSubMatches; + private final boolean noOverlappingMatches; private final HyphenationTree hyphenationTree; HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { @@ -46,6 +48,9 @@ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundW } catch (Exception e) { throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.", e); } + + noSubMatches = settings.getAsBoolean("no_sub_matches", false); + noOverlappingMatches = settings.getAsBoolean("no_overlapping_matches", false); } @Override @@ -57,7 +62,9 @@ public TokenStream create(TokenStream tokenStream) { minWordSize, minSubwordSize, maxSubwordSize, - onlyLongestMatch + onlyLongestMatch, + noSubMatches, + noOverlappingMatches ); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java index ad98c2f8ffe1e..69dd8e91b52b2 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CompoundAnalysisTests.java @@ -31,6 +31,9 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -42,6 +45,7 @@ import static org.hamcrest.Matchers.instanceOf; public class CompoundAnalysisTests extends ESTestCase { + public void testDefaultsCompoundAnalysis() throws Exception { Settings settings = getJsonSettings(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); @@ -63,6 +67,44 @@ public void testDictionaryDecompounder() throws Exception { assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); } + public void testHyphenationDecompoundingAnalyzerOnlyLongestMatch() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerOnlyLongestMatch", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat( + terms, + hasItems("kaffeemaschine", "kaffee", "fee", "maschine", "fussballpumpe", "fussball", "ballpumpe", "pumpe") + ); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example given a word list of: ["kaffee", "fee", "maschine"] + * no_sub_matches should prevent the token "fee" as a token in "kaffeemaschine". 
+ */ + public void testHyphenationDecompoundingAnalyzerNoSubMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoSubMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "ballpumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + + /** + * For example, given a word list of ["fuss", "fussball", "ballpumpe", "ball", "pumpe"], + * no_overlapping_matches should prevent "ballpumpe" from being emitted as a token of "fussballpumpe". + */ + public void testHyphenationDecompoundingAnalyzerNoOverlappingMatches() throws Exception { + Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; + for (Settings settings : settingsArr) { + List terms = analyze(settings, "hyphenationDecompoundingAnalyzerNoOverlappingMatches", "kaffeemaschine fussballpumpe"); + MatcherAssert.assertThat(terms, hasItems("kaffeemaschine", "kaffee", "maschine", "fussballpumpe", "fussball", "pumpe")); + } + assertWarnings("Setting [version] on analysis component [custom7] has no effect and is deprecated"); + } + private List analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisModule analysisModule = createAnalysisModule(settings); @@ -92,20 +134,25 @@ public Map> getTokenFilters() { } private Settings getJsonSettings() throws IOException { - String json = "/org/elasticsearch/analysis/common/test1.json"; - return Settings.builder() - .loadFromStream(json, getClass().getResourceAsStream(json), false) - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); + return getSettings("/org/elasticsearch/analysis/common/test1.json"); } private Settings getYamlSettings() throws IOException { - String yaml = "/org/elasticsearch/analysis/common/test1.yml"; + return getSettings("/org/elasticsearch/analysis/common/test1.yml"); + } + + private Settings getSettings(String filePath) throws IOException { + String hypenationRulesFileName = "de_DR.xml"; + InputStream hypenationRules = getClass().getResourceAsStream(hypenationRulesFileName); + Path home = createTempDir(); + Path config = home.resolve("config"); + Files.createDirectory(config); + Files.copy(hypenationRules, config.resolve(hypenationRulesFileName)); + return Settings.builder() - .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) + .loadFromStream(filePath, getClass().getResourceAsStream(filePath), false) .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } } diff --git a/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml new file mode 100644 index 0000000000000..37bcde1246a81 --- /dev/null +++ b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/de_DR.xml @@ -0,0 +1,1130 @@ + + + + + + + + + + + + aA + bB + cC + dD + eE + fF + gG + hH + iI + jJ + kK + lL + mM + nN + oO + pP + qQ + rR + sS + tT
uU + vV + wW + xX + yY + zZ + �� + �� + �� + �� + � + + + + .aa6l .ab3a4s .ab3ei .abi2 .ab3it .ab1l .ab1r .ab3u .ad3o4r .alti6 + .ana3c .an5alg .an1e + .ang8s2t1 + .an1s .ap1p .ar6sc .ar6ta .ar6tei .as2z + .au2f1 .au2s3 .be5erb .be3na .ber6t5r .bie6r5 .bim6s5t .brot3 .bru6s + .ch6 .che6f5 .da8c .da2r .dar5in .dar5u .den6ka .de5r6en .des6pe + .de8spo .de3sz .dia3s4 .dien4 .dy2s1 .ehren5 .eine6 .ei6n5eh .ei8nen + .ein5sa .en6der .en6d5r .en3k4 .en8ta8 .en8tei .en4t3r .epo1 .er6ban + .er6b5ei .er6bla .er6d5um .er3ei .er5er .er3in .er3o4b .erwi5s .es1p + .es8t1l .es8t1n + .ex1a2 .ex3em .fal6sc .fe6st5a .flu4g3 .furch8 .ga6ner .ge3n4a + .ge5r� + .ges6 + .halb5 .halbe6 .hal6br .haup4 .hau4t .heima6 .he4r3e + .her6za .he5x .hin3 .hir8sc .ho4c .hu3sa .hy5o .ibe5 .ima6ge .in1 + .ini6 .is5chi .jagd5 .kal6k5o .ka6ph .ki4e .kop6f3 .kraf6 .k�5ra + .lab6br .liie6 .lo6s5k .l�4s3t .ma5d .mi2t1 .no6th .no6top + .obe8ri .ob1l .obs2 .ob6st5e .or3c .ort6s5e .ost3a .oste8r .pe4re + .pe3ts .ph6 .po8str .rau4m3 .re5an .ro8q .ru5the .r�5be + + .sch8 .se6e .se5n6h .se5ra .si2e .spi6ke .st4 .sy2n + .tages5 .tan6kl .ta8th .te6e .te8str .to6der .to8nin .to6we .um1 + .umpf4 .un1 .une6 .unge5n .ur1c .ur5en .ve6rin .vora8 .wah6l5 .we8ges + .we8s2t .wes3te + .wo6r .wor3a .wun4s .zi4e .zuch8 .�nde8re .�ch8 aa1c aa2gr + aal5e aa6r5a a5arti aa2s1t aat2s 6aba ab3art 1abdr 6abel aben6dr + ab5erk ab5err ab5esse 1abf 1abg 1abh� ab1ir 1abko a1bl ab1la + 5ablag a6bla� ab4ler ab1lu a8bl� 5a6bl� abma5c + 1abn ab1ra ab1re 5a6brec ab1ro + ab1s + ab8sk abs2z 3abtei ab1ur 1abw + 5abze 5abzu ab1�n ab�u8 a4ce. a5chal ach5art ach5au a1che + a8chent ach6er. a6ch5erf a1chi ach1l ach3m ach5n a1cho ach3re a1chu + ach1w a1chy ach5�f ack1o acks6t ack5sta a1d 8ad. a6d5ac ad3ant + ad8ar 5addi a8dein ade5o8 adi5en 1adj 1adle ad1op a2dre 3adres adt1 + 1adv a6d� a1e2d ae1r a1er. 1aero 8afa a3fal af1an a5far a5fat + af1au a6fentl a2f1ex af1fr af5rau af1re 1afri af6tent af6tra aft5re + a6f5um 8af� ag5abe 5a4gent ag8er ages5e 1aggr ag5las ag1lo a1gn + ag2ne 1agog a6g5und a1ha a1he ah5ein a4h3erh a1hi ahl1a ah1le ah4m3ar + ahn1a a5ho ahra6 ahr5ab ah1re ah8rei ahren8s ahre4s3 ahr8ti ah1ru a1hu + ah8� ai3d2s ai1e aif6 a3inse ai4re. a5isch. ais8e a3ismu ais6n + aiso6 a1j 1akad a4kade a1ke a1ki 1akko 5akro1 a5lal al5ans 3al8arm + al8beb al8berw alb5la 3album al1c a1le a6l5e6be a4l3ein a8lel a8lerb + a8lerh a6lert 5a6l5eth 1algi al4gli al3int al4lab al8lan al4l3ar + alle3g a1lo a4l5ob al6schm al4the + + al4t3re 8a1lu alu5i a6lur + alu3ta a1l� a6mate 8ame. 5a6meise am6m5ei am6mum am2n ampf3a + am6schw am2ta a1mu a1m� a3nac a1nad anadi5e an3ako an3alp 3analy + an3ame an3ara a1nas an5asti a1nat anat5s an8dent ande4s3 an1ec an5eis + an1e2k 4aner. 
a6n5erd a8nerf a6n5erke 1anfa 5anfert 1anf� 3angab + 5angebo an3gli ang6lis an2gn 3angri ang5t6 5anh� ani5g ani4ka + an5i8on an1kl an6kno an4kro 1anl anma5c anmar4 3annah anne4s3 a1no + 5a6n1o2d 5a6n3oma 5a6nord 1anr an1sa 5anschl an4soz an1st 5anstal + an1s2z 5antenn an1th 5anw� a5ny an4z3ed 5anzeig 5anzieh 3anzug + an1� 5an�s a1n� an�8d a1os a1pa 3apfel a2ph1t + aph5�6 a1pi 8apl apo1c apo1s + a6pos2t + a6poth 1appa ap1pr a1pr + a5p� a3p� a1ra a4r3af ar3all 3arbei 2arbt ar1c 2a1re ar3ein + ar2gl 2a1ri ari5es ar8kers ar6les ar4nan ar5o6ch ar1o2d a1rol ar3ony + a8ror a3ros ar5ox ar6schl 8artei ar6t5ri a1ru a1ry 1arzt arz1w + ar8z� ar�8m ar�6 ar5�m ar1�2 a1sa a6schec + asch5l asch3m a6schn a3s4hi as1pa asp5l + + as5tev 1asth + + a1str ast3re 8a1ta ata5c ata3la a6tapf ata5pl a1te a6teli aten5a + ate5ran 6atf 6atg a1th at3hal 1athl 2a1ti 5atlant 3atlas 8atmus 6atn + a1to a6t5ops ato6ra a6t5ort. 4a1tr a6t5ru at2t1h at5t6h� 6a1tu + atz1w a1t� a1t� au1a au6bre auch3a au1e aue4l 5aufent + 3auff� 3aufga 1aufn auf1t 3auftr 1aufw 3auge. au4kle aule8s 6aum + au8mar aum5p 1ausb 3ausd 1ausf 1ausg au8sin + + au4sta 1ausw 1ausz + aut5eng au1th 1auto au�e8 a1v ave5r6a aver6i a1w a6wes a1x + a2xia a6xio a1ya a1z azi5er. 8a� 1ba 8ba8del ba1la ba1na + ban6k5r ba5ot bardi6n ba1ro basten6 bau3sp 2b1b bb6le b2bli 2b1c 2b1d + 1be be1a be8at. be1ch 8becht 8becke. be5el be1en bee8rei be5eta bef2 + 8beff be1g2 beh�8 bei1s 6b5eisen bei3tr b8el bel8o belu3t be3nac + bend6o be6ners be6nerw be4nor ben4se6 bens5el be1n� be1n� + be1o2 b8er. be1ra be8rac ber8gab. ber1r be1r� bes8c bes5erh + bes2p be5tha bet5sc be1un be1ur 8bex be6zwec 2b1f8 + + 2b1g2 + bga2s5 bge1 2b1h bhole6 1bi bi1bl b6ie bi1el bi1la bil�5 bi1na + bi4nok + + bi6stu bi5tr bit4t5r b1j 2b1k2 bk�6 bl8 b6la. + 6b1lad 6blag 8blam 1blat b8latt 3blau. b6lav 3ble. b1leb b1led + 8b1leg 8b1leh 8bleid 8bleih 6b3lein + + ble4m3o 4blich b4lind + 8bling b2lio 5blit b4litz b1loh 8b1los 1blu 5blum 2blun blut3a blut5sc + 3bl� bl�s5c 5bl� 3bl� bl�8sc 2b1m 2b1n 1bo + bo1ch bo5d6s boe5 8boff 8bonk bo1ra b1ort 2b1p2 b1q 1br brail6 brast8 + bre4a b5red 8bref 8b5riem b6riga bro1s b1rup b2ruz 8br�h + br�s5c 8bs b1sa b8sang b2s1ar b1sc bs3erl bs3erz b8sof b1s2p + bst1h b3stru b5st� b6sun 2b1t b2t1h 1bu bu1ie bul6k b8ure bu6sin + 6b1v 2b1w 1by1 by6te. 8b1z + + 1b� b5�6s5 1b� + b6�5bere b�ge6 b�gel5e b�r6sc 1ca cag6 ca5la ca6re + ca5y c1c 1ce celi4c celich5 ce1ro c8h 2ch. 1chae ch1ah ch3akt cha6mer + 8chanz 5chara 3chari 5chato 6chb 1chef 6chei ch3eil ch3eis 6cherkl + 6chf 4chh 5chiad 5chias 6chins 8chj chl6 5chlor 6ch2m 2chn6 ch8nie + 5cho. 8chob choi8d 6chp ch3ren ch6res ch3r� 2chs 2cht cht5ha + cht3hi 5chthon ch6tin 6chuh chu4la 6ch3unt chut6t 8chw 1ci ci5tr c2k + 2ck. ck1ei 4ckh ck3l ck3n ck5o8f ck1r 2cks ck5stra ck6s5u c2l 1c8o + con6ne 8corb cos6t c3q 1c6r 8c1t 1cu 1cy 5c�1 c�5 1da. + 8daas 2dabg 8dabr 6dabt 6dabw 1dac da2gr 6d5alk 8d5amt dan6ce. + dani5er dan8ker 2danl danla6 6dans 8danzi 6danzu d1ap da2r1a8 2d1arb + d3arc dar6men 4d3art 8darz 1dat 8datm 2d1auf 2d1aus 2d1b 2d1c 2d1d + d5de d3d2h dd�mme8 1de 2deal de5an de3cha de1e defe6 6deff 2d1ehr + 5d4eic de5isc de8lar del6s5e del6spr de4mag de8mun de8nep dene6r + 8denge. 8dengen de5o6d 2deol de5ram 8derdb der5ein de1ro der1r d8ers + der5um de4s3am de4s3an de4sau de6sil de4sin de8sor de4spr de2su 8deul + de5us. 
2d1f df2l 2d1g 2d1h 1di dia5c di5ara dice5 di3chr di5ena di1gn + di1la dil8s di1na 8dind 6dinf 4d3inh 2d1ins di5o6d di3p4t di8sen dis1p + di5s8per di6s5to + dis3tr + di8tan di8tin d1j 6dje 2dju 2d1k 2d1l 2d1m + 2d1n6 dni6 dnje6 1do 6d5obe do6berf 6d5ony do3ran 6dord 2d1org dor4t3h + + 6doth dott8e 2d1p d5q dr4 1drah 8drak d5rand 6dre. 4drech + d6reck 4d3reg 8d3reic d5reife 8drem 8d1ren 2drer 8dres. 6d5rh 1dria + d1ric 8drind droi6 dro5x 1dru 8drut dr�s5c 1dr� dr�5b + dr�8sc 2ds d1sa d6san dsat6 d1sc 5d6scha. 5dschik dse8e d8serg + 8dsl d1sp d4spak ds2po d8sp� d1st d1s� 2dt d1ta d1te d1ti + d1to dt1s6 d1tu d5t� 1du du5als du1b6 du1e duf4t3r 4d3uh du5ie + 8duml 8dumw 2d1und du8ni 6d5unt dur2c durch3 6durl 6dursa 8durt + dus1t + du8schr 2d1v 2d1w dwa8l 2d1z 1d� 6d�h 8d�nd d�6r + d�8bl d5�l d�r6fl d�8sc d5�4st + + 1d� ea4ben e1ac e1ah e1akt e1al. e5alf e1alg e5a8lin e1alk e1all + e5alp e1alt e5alw e1am e1and ea6nim e1ar. e5arf e1ark e5arm e3art + e5at. e6ate e6a5t6l e8ats e5att e6au. e1aus e1b e6b5am ebens5e + eb4lie eb4ser eb4s3in e1che e8cherz e1chi ech3m 8ech3n ech1r ech8send + ech4su e1chu eck5an e5cl e1d ee5a ee3e ee5g e1ei ee5isc eei4s3t + ee6lend e1ell ee5l� e1erd ee3r4e ee8reng eere6s5 ee5r� + ee6tat e1ex e1f e6fau e8fe8b 3effek ef3rom ege6ra eglo6si 1egy e1ha + e6h5ach eh5ans e6hap eh5auf e1he e1hi ehl3a eh1le ehl5ein eh1mu ehn5ec + e1ho ehr1a eh1re ehre6n eh1ri eh1ru ehr5um e1hu eh1w e1hy e1h� + e1h� e3h�t ei1a eia6s ei6bar eich3a eich5r ei4dar ei6d5ei + ei8derf ei3d4sc ei1e 8eifen 3eifri 1eign eil1d ei6mab ei8mag ein1a4 + ei8nat ei8nerh ei8ness ei6nete ein1g e8ini ein1k ei6n5od ei8nok ei4nor + e3ins� ei1o e1irr ei5ru ei8sab ei5schn ei6s5ent ei8sol ei4t3al + eit3ar eit1h ei6thi ei8tho eit8samt ei6t5um e1j 1ekd e1ke e1ki e1k2l + e1kn ekni4 e1la e2l1al 6elan e6lanf e8lanl e6l5ans el3arb el3arm + e6l3art 5e6lasti e6lauge elbst5a e1le 6elef ele6h e6l5ehe e8leif + e6l5einh 1elek e8lel 3eleme e6lemen e6lente el5epi e4l3err e6l5ersc + elf2l elg2 e6l5ins ell8er 4e1lo e4l3ofe el8soh el8tent 5eltern e1lu + elut2 e1l� e1l� em8dei em8meis 4emo emo5s 1emp1f 1empt 1emto + e1mu emurk4 emurks5 e1m� en5a6ben en5achs en5ack e1nad en5af + en5all en3alt en1am en3an. en3ant en3anz en1a6p en1ar en1a6s 6e1nat + en3auf en3aus en2ce enda6l end5erf end5erg en8dess 4ene. en5eck + e8neff e6n5ehr e6n5eim en3eis 6enem. 6enen e4nent 4ener. e8nerd + e6n3erf e4nerg 5energi e6n5erla en5ers e6nerst en5erw 6enes e6n5ess + e2nex en3glo 2eni enni6s5 ennos4 enns8 e1no e6nober eno8f en5opf + e4n3ord en8sers ens8kl en1sp ens6por en5t6ag enta5go en8terbu en6tid + 3entla ent5ric 5entwic 5entwu 1entz enu5i e3ny en8zan en1�f + e1n�s e1n�g eo1c e5o6fe e5okk e1on. e3onf e5onk e5onl e5onr + e5opf e5ops e5or. e1ord e1org eo5r6h eo1t e1pa e8pee e6p5e6g ep5ent + e1p2f e1pi 5epid e6pidem e1pl 5epos e6pos. ep4p3a e1pr e1p� e1q + e1ra. er5aal 8eraba e5rabel er5a6ben e5rabi er3abs er3ach era5e + era5k6l er3all er3amt e3rand e3rane er3ans e5ranz. e1rap er3arc + e3rari er3a6si e1rat erat3s er3auf e3raum 3erbse er1c e1re 4e5re. + er3eck er5egg er5e2h 2erei e3rei. e8reine er5einr 6eren. e4r3enm + 4erer. e6r5erm er5ero er5erst e4r3erz er3ess 5erf�l er8gan. + 5ergebn er2g5h 5erg�nz 5erh�hu 2e1ri eri5ak e6r5iat e4r3ind + e6r5i6n5i6 er5ins e6r5int er5itio er1kl 3erkl� 5erl�s. + ermen6s er6nab 3ernst 6e1ro. e1rod er1o2f e1rog 6e3roi ero8ide e3rol + e1rom e1ron e3rop8 e2r1or e1ros e1rot er5ox ersch4 5erstat er6t5ein + er2t1h er5t6her 2e1ru eruf4s3 e4r3uhr er3ums e5rus 5erwerb e1ry er5zwa + er3zwu er�8m er5�s er�8 e3r�s. 
e6r1�2b e1sa + esa8b e8sap e6s5a6v e1sc esch4l ese1a es5ebe eserve5 e8sh es5ill + es3int es4kop e2sl eso8b e1sp espei6s5 es2po es2pu 5essenz e6stabs + e6staf e6st5ak est3ar e8stob e1str est5res es3ur e2sz e1s� e1ta + et8ag etari5e eta8ta e1te eten6te et5hal e5thel e1ti 1etn e1to e1tr + et3rec e8tscha et8se et6tei et2th et2t1r e1tu etu1s et8zent et8zw + e1t� e1t� e1t� eu1a2 eu1e eue8rei eu5fe euin5 euk2 + e1um. eu6nio e5unter eu1o6 eu5p 3europ eu1sp eu5str eu8zo e1v eval6s + eve5r6en ever4i e1w e2wig ex1or 1exp 1extr ey3er. e1z e1�2 + e5�8 e1� e8�es fa6ch5i fade8 fa6del fa5el. + fal6lo falt8e fa1na fan4gr 6fanl 6fap far6ba far4bl far6r5a 2f1art + fa1sc fau8str fa3y 2f1b2 6f1c 2f1d 1fe 2f1eck fe6dr feh6lei f6eim + 8feins f5eis fel5en 8feltern 8femp fe5rant 4ferd. ferri8 fe8stof + fe6str fe6stum fe8tag fet6ta fex1 2ff f1fa f6f5arm f5fe ffe5in ffe6la + ffe8ler ff1f f1fla ff3lei ff4lie ff8sa ff6s5ta 2f1g2 fgewen6 4f1h 1fi + fid4 fi3ds fieb4 fi1la fi8lei fil4m5a f8in. fi1na 8finf fi8scho fi6u + 6f1j 2f1k2 f8lanz fl8e 4f3lein 8flib 4fling f2lix 6f3lon 5flop 1flor + 5f8l�c 3fl�t 2f1m 2f1n 1fo foh1 f2on fo6na 2f1op fo5ra + for8mei for8str for8th for6t5r fo5ru 6f5otte 2f1p8 f1q fr6 f5ram + 1f8ran f8ra� f8re. frei1 5frei. f3reic f3rest f1rib + 8f1ric 6frig 1fris fro8na fr�s5t 2fs f1sc f2s1er f5str + fs3t�t 2ft f1tak f1te ft5e6h ftere6 ft1h f1ti f5to f1tr ft5rad + ft1sc ft2so f1tu ftwi3d4 ft1z 1fu 6f5ums 6funf fun4ka fu8�end + 6f1v 2f1w 2f1z 1f� f�1c 8f�rm 6f�ug + f�8� f�de3 8f�f 3f�r 1f� + f�n4f3u 1ga ga6bl 6gabw 8gabz g3a4der ga8ho ga5isc 4gak ga1la + 6g5amt ga1na gan5erb gan6g5a ga5nj 6ganl 8gansc 6garb 2g1arc 2g1arm + ga5ro 6g3arti ga8sa ga8sc ga6stre 2g1atm 6g5auf gau5fr g5aus 2g1b g5c + 6gd g1da 1ge ge1a2 ge6an ge8at. ge1e2 ge6es gef2 8geff ge1g2l ge1im + 4g3eise geist5r gel8bra gelt8s ge5l� ge8nin gen3k 6g5entf + ge3n� ge1or ge1ra ge6rab ger8au 8gerh� ger8ins ge1ro 6g5erz. + ge1r� ge1r� ge1s ges2p + ge2s7te. ge2s7ten ge2s7ter ge2s7tik + ge5unt 4g3ex3 2g1f8 2g1g g1ha 6g1hei + 5ghel. g5henn 6g1hi g1ho 1ghr g1h� 1gi gi5la gi8me. gi1na + 4g3ins + gis1tr + g1j 2g1k 8gl. 1glad g5lag glan4z3 1glas 6glass 5glaub + g3lauf 1gle. g5leb 3gleic g3lein 5gleis 1glem 2gler 8g3leu gli8a + g2lie 3glied 1g2lik 1g2lim g6lio 1gloa 5glom 1glon 1glop g1los g4loss + g5luf 1g2ly 1gl� 2g1m gn8 6gn. 1gna 8gnach 2gnah g1nas g8neu + g2nie g3nis 1gno 8gnot 1go goe1 8gof 2gog 5gogr 6g5oh goni5e 6gonist + go1ra 8gord 2g1p2 g1q 1gr4 g5rahm gra8m gra4s3t 6g1rec gre6ge 4g3reic + g5reit 8grenn gri4e g5riem 5grif 2grig g5ring 6groh 2grot gro6� + 4grut 2gs gs1ab g5sah gs1ak gs1an gs8and gs1ar gs1au g1sc + gs1ef g5seil gs5ein g2s1er gs1in g2s1o gso2r gs1pr g2s1u 2g1t g3te + g2t1h 1gu gu5as gu2e 2gue. 6gued 4g3uh 8gums 6g5unt + + gut3h gu2tu + 4g1v 2g1w gy1n g1z 1g� 8g�8m 6g�rm 1g� 1g� + 6g�b 1haa hab8r ha8del hade4n 8hae ha5el. haf6tr 2hal. ha1la + hal4b5a 6hale 8han. ha1na han6dr han6ge. 2hani h5anth 6hanz 6harb + h3arbe h3arme ha5ro ha2t1h h1atm hau6san ha8� h1b2 h1c h1d + he2bl he3cho h3echt he5d6s 5heft h5e6he. hei8ds h1eif 2hein he3ism + he5ist. heit8s3 hek6ta hel8lau 8helt he6mer 1hemm 6h1emp hen5end + hen5klo hen6tri he2nu 8heo he8q her3ab he5rak her3an 4herap her3au + h3erbi he1ro he8ro8b he4r3um her6z5er he4spe he1st heta6 het5am he5th + heu3sc he1xa hey5e h1f2 h1g hgol8 h1h h1iat hie6r5i hi5kt hil1a2 + hil4fr hi5nak hin4ta hi2nu hi5ob hirn5e hir6ner hi1sp hi1th hi5tr + 5hitz h1j h6jo h1k2 hlabb4 hla4ga hla6gr h5lai hl8am h1las h1la� + hl1c h1led h3lein h5ler. 
h2lif h2lim h8linf hl5int h2lip + h2lit h4lor h3lose h1l�s hme5e h2nee h2nei hn3eig h2nel hne8n + hne4p3f hn8erz h6netz h2nip h2nit h1nol hn5sp h2nuc h2nud h2nul hoch1 + 1hoh hoh8lei 2hoi ho4l3ar 1holz h2on ho1ra 6horg 5horn. ho3sl hos1p + ho4spi h1p hpi6 h1q 6hr h1rai h8rank h5raum hr1c hrcre8 h1red h3reg + h8rei. h4r3erb h8rert hrg2 h1ric hr5ins h2rom hr6t5erl hr2t1h hr6t5ra + hr8tri h6rum hr1z hs3ach h6s5amt h1sc h6s5ec h6s5erl hs8erle h4sob + h1sp h8spa� h8spel hs6po h4spun h1str h4s3tum hs3und + h1s� h5ta. h5tab ht3ac ht1ak ht3ang h5tanz ht1ar ht1at h5taub + h1te h2t1ec ht3eff ht3ehe h4t3eif h8teim h4t3ein ht3eis h6temp h8tentf + hte8ren h6terf� h8tergr h4t3erh h6t5ersc h8terst h8tese h8tess + h2t1eu h4t3ex ht1he ht5hu h1ti ht5rak hts3ah ht1sc ht6sex ht8sk ht8so + h1tu htz8 h5t�m hub5l hu6b5r huh1l h5uhr. huld5a6 hu8lent + hu8l� h5up. h1v h5weib h3weis h1z h�8kl h�l8s + h�ma8tu8 h�8sche. h�t1s h�u4s3c 2h�. + 2h�e 8h�i h�6s h�s5c h�hne6 h�l4s3t + h�tte8re i5adn i1af i5ak. i1al. i1al1a i1alb i1ald i5alei i1alf + i1alg i3alh i1alk i1all i1alp i1alr i1als i1alt i1alv i5alw i3alz + i1an. ia5na i3and ian8e ia8ne8b i1ang i3ank i5ann i1ant i1anz i6apo + i1ar. ia6rab i5arr i1as. i1asm i1ass i5ast. i1at. i5ats i1au i5azz + i6b5eig i6b5eis ib2le i4blis i6brig i6b5unt i6b�b i1che ich5ei + i6cherb i1chi ich5ins ich1l ich3m ich1n i1cho icht5an icht3r i1chu + ich1w ick6s5te ic5l i1d id3arm 3ideal ide8na 3ideol ide5r� i6diot + id5rec id1t ie1a ie6b5ar iebe4s3 ie2bl ieb1r ie8bra ie4bre ie8b� + ie2dr ie1e8 ie6f5ad ief5f ie2f1l ie4fro ief1t i1ei ie4l3ec ie8lei + ie4lek i3ell i1en. i1end ien6e i3enf i5enn ien6ne. i1enp i1enr + i5ensa ien8stal i5env i1enz ie5o ier3a4b ie4rap i2ere ie4rec ie6r5ein + ie6r5eis ier8er i3ern. ie8rum ie8rund ie6s5che ie6tau ie8tert ie5the + ie6t5ri i1ett ie5un iex5 2if i1fa if5ang i6fau if1fr if5lac i5f6lie + i1fre ift5a if6t5r ig3art 2ige i8gess ig5he i5gla ig2ni i5go ig3rot + ig3s2p i1ha i8ham i8hans i1he i1hi ih1n ih1r i1hu i8hum ih1w 8i1i ii2s + ii2t i1j i1k i6kak i8kerz i6kes ik4ler i6k5unt 2il i5lac i1lag il3ans + i5las i1lau il6auf i1le ile8h i8lel il2fl il3ipp il6l5enn i1lo ilt8e + i1lu i1l� i8mart imb2 i8mele i8mid imme6l5a i1mu i1m� + i5m� ina5he i1nat in1au inau8s 8ind. in4d3an 5index ind2r 3indus + i5nec i2n1ei i8nerw 3infek 1info 5ingeni ing5s6o 5inhab ini5er. 5inj + in8k�t in8nan i1no inoi8d in3o4ku in5sau in1sp 5inspe 5instit + 5instru ins4ze 5intere 5interv in3the in5t2r i5ny in�2 i1n�r + in1�s in�8 in5�d i1n�s 2io io1a8 io1c iode4 io2di + ioi8 i1ol. i1om. i1on. i5onb ion2s1 i1ont i5ops i5o8pt i1or. + i3oral io3rat i5orc i1os. i1ot. i1o8x 2ip i1pa i1pi i1p2l i1pr i1q + i1ra ir6bl i1re i1ri ir8me8d ir2m1o2 ir8nak i1ro ir5rho ir6schl + ir6sch5r i5rus i5ry i5r� i1sa i8samt i6sar i2s1au i8scheh i8schei + isch5m isch3r isch�8 is8ele ise3ra i4s3erh is3err isi6de i8sind + is4kop ison5e is6por i8s5tum i5sty i5s� i1ta it5ab. i2t1a2m + i8tax i1te i8tersc i1thi i1tho i5thr it8h� i1ti i8ti8d iti6kl + itmen4 i1to i8tof it3ran it3rau i1tri itri5o it1sc it2se it5spa it8tru + i1tu it6z5erg it6z1w i1t� it�6r5e it�t2 it�ts5 + i1t� i1u iu6r 2i1v i6vad iva8tin i8vei i6v5ene i8verh i2vob i8vur + i1w iwi2 i5xa i1xe i1z ize8n i8zir i6z5w i�8m i1�6r + i5�t. 
i5�v i1�8 i�8 i6�5ers ja5la + je2t3r 6jm 5jo jo5as jo1ra jou6l ju5cha jugen4 jugend5 jung5s6 + + 3j� 1ka 8kachs 8kakz ka1la kal5d kam5t ka1na 2kanl 8kapf ka6pl + ka5r6a 6k3arbe ka1ro kar6p5f 4k3arti 8karz ka1r� kasi5e ka6teb + kat8ta kauf6s kau3t2 2k1b 2k1c 4k1d kehr6s kehrs5a 8keic 2k1eig 6k5ein + 6k5eis ke6lar ke8leis ke8lo 8kemp k5ente. k3entf 8k5ents 6kentz ke1ra + k5erlau 2k1f8 2k1g 2k1h ki5fl 8kik king6s5 6kinh ki5os ki5sp ki5th + 8ki8� 2k1k2 kl8 1kla 8klac k5lager kle4br k3leib 3kleid kle5isc + 4k3leit k3lek 6k5ler. 5klet 2klic 8klig k2lim k2lin 5klip 5klop k3lor + 1kl� 2k1m kmani5e kn8 6kner k2ni kn�8 1k2o ko1a2 ko6de. + ko1i koi8t ko6min ko1op ko1or ko6pht ko3ra kor6d5er ko5ru ko5t6sc k3ou + 3kow 6k5ox 2k1p2 k1q 1kr8 4k3rad 2k1rec 4k3reic kre5ie 2krib 6krig + 2krip 6kroba 2ks k1sa k6sab ksal8s k8samt k6san k1sc k2s1ex k5spat + k5spe k8spil ks6por k1spr kst8 k2s1uf 2k1t kta8l kt5a6re k8tein kte8re + k2t1h k8tinf kt3rec kt1s 1ku ku1ch kuck8 k3uhr ku5ie kum2s1 kunfts5 + kun2s kunst3 ku8rau ku4ro kurz1 + + 4kusti ku1ta ku8� + 6k1v 2k1w ky5n 2k1z 1k� k�4m 4k3�mi k�se5 1k� + k�1c k�1s 1k� k�1c k�r6sc + + 1la. + 8labf 8labh lab2r 2l1abs lach3r la8dr 5ladu 8ladv 6laff laf5t la2gn + 5laken 8lamb la6mer 5lampe. 2l1amt la1na 1land lan4d3a lan4d3r lan4gr + 8lanme 6lann 8lanw 6lan� 8lappa lap8pl lap6pr l8ar. la5ra lar4af + la8rag la8ran la6r5a6s l3arbe la8rei 6larm. la8sa la1sc la8sta lat8i + 6l5atm 4lauss 4lauto 1law 2lb l8bab l8bauf l8bede l4b3ins l5blo + lbst5an lbst3e 8lc l1che l8chert l1chi lch3m l5cho lch5w 6ld l4d3ei + ld1re l6d�b le2bl le8bre lecht6s5 led2r 6leff le4gas 1lehr lei6br + le8inf 8leinn 5leistu 4lektr le6l5ers lemo2 8lemp l8en. 8lends + 6lendun le8nend len8erw 6l5ents 4l3entw 4lentz 8lenzy 8leoz 6lepi + le6pip 8lepo 1ler l6er. 8lerbs 6l5erde le8reis le8rend le4r3er 4l3erg + l8ergr 6lerkl 6l5erzie 8ler� 8lesel lesi5e le3sko le3tha let1s + 5leuc 4leuro leu4s3t le5xe 6lexp l1f 2l1g lgend8 l8gh lglie3 lglied6 + 6l1h 1li li1ar li1as 2lick li8dr li1en lien6n li8ers li8ert 2lie� + 3lig li8ga8b li1g6n li1l8a 8limb li1na 4l3indu lings5 + 4l3inh 6linj link4s3 4linkt 2lint 8linv + + 4lipp 5lipt 4lisam + livi5e 6l1j 6l1k l8keim l8kj lk2l lko8f lkor8 lk2sa lk2se 6ll l1la + ll3a4be l8labt ll8anl ll1b ll1c ll1d6 l1le l4l3eim l6l5eise ller3a + l4leti l5lip l1lo ll3ort ll5ov ll6spr llte8 l1lu ll3urg l1l� + l5l� l6l�b 2l1m l6m5o6d 6ln l1na l1no 8lobl lo6br 3loch. + l5o4fen 5loge. 5lohn 4l3ohr 1lok l2on 4l3o4per lo1ra 2l1ord 6lorg + 4lort lo1ru 1los. 
lo8sei 3losig lo6ve lowi5 6l1p lp2f l8pho l8pn + lp4s3te l2pt l1q 8l1r 2ls l1sa l6sarm l1sc l8sec l6s5erg l4s3ers l8sh + l5s6la l1sp ls4por ls2pu l1str l8suni l1s� 2l1t lt5amp l4t3ein + l5ten l6t5eng l6t5erp l4t3hei lt3her l2t1ho l6t5i6b lti1l l8tr� + lt1sc lt6ser lt4s3o lt5ums lu8br lu2dr lu1en8 8lu8fe luft3a luf8tr + lu6g5r 2luh l1uhr lu5it 5luk 2l1umf 2l1umw 1lun 6l5u6nio 4l3unte lu5ol + 4lurg 6lurs l3urt lu4sto + lus1tr + lu6st5re lu8su lu6tal lu6t5e6g lu8terg + lu3the lu6t5or lu2t1r lu6�5 l1v lve5r6u 2l1w 1ly lya6 + 6lymp ly1no l8zess l8zo8f l3zwei lz5wu 3l�nd l�5on + l�6sc l�t1s 5l�uf 2l�ug l�u6s5c l�5v + l1�l 1l�s l�1�6t 6l1�be 1ma + 8mabg ma5chan mad2 ma5el 4magg mag8n ma1la ma8lau mal5d 8malde mali5e + malu8 ma8lut 2m1amp 3man mand2 man3ds 8mangr mani5o 8m5anst 6mappa + 4m3arbe mar8kr ma1r4o mar8schm 3mas ma1sc ma1t� 4m5auf ma5yo 2m1b + mb6r 2m1c 2m1d md6s� 1me me1ch me5isc 5meld mel8sa 8memp me5nal + men4dr men8schl men8schw 8mentsp me1ra mer4gl me1ro 3mes me6s5ei me1th + me8� 2m1f6 2m1g 2m1h 1mi mi1a mi6ale mi1la 2m1imm mi1na + mi5n� mi4s3an mit1h mi5t6ra 3mitt mitta8 mi6�5 6mj + 2m1k8 2m1l 2m1m m6mad m6m5ak m8menth m8mentw mme6ra m2mn mm5sp mm5ums + mmut5s m8m�n m1n8 m5ni 1mo mo5ar mo4dr 8mof mo8gal mo4kla mol5d + m2on mon8do mo4n3od + mon2s1tr + mont8a 6m5ony mopa6 mo1ra mor8d5a mo1sc mo1sp 5mot + moy5 2mp m1pa mpfa6 mpf3l mphe6 m1pi mpin6 m1pl mp2li m2plu mpo8ste + m1pr mpr�5 mp8th mput6 mpu5ts m1p� 8m1q 2m1r 2ms ms5au m1sc + msch4l ms6po m3spri m1str 2m1t mt1ar m8tein m2t1h mt6se mt8s� + mu5e 6m5uh mumi1 1mun mun6dr muse5e mu1ta 2m1v mvol2 mvoll3 2m1w 1my + 2m1z m�6kl 1m�n m�1s m�5tr m�u4s3c 3m�� + m�b2 6m�l 1m� 5m�n 3m�t 1na. + n5ab. 8nabn n1abs n1abz na6b� na2c nach3e 3nacht 1nae na5el + n1afr 1nag 1n2ah na8ha na8ho 1nai 6nair na4kol n1akt nal1a 8naly 1nama + na4mer na1mn n1amp 8n1amt 5nanc nan6ce n1and n6and. 2n1ang 1nani + 1nann n1ans 8nanw 5napf. 1n2ar. na2ra 2n1arc n8ard 1nari n8ark + 6n1arm 5n6ars 2n1art n8arv 6natm nat6s5e 1naue 4nauf n3aug 5naui n5auk + na5um 6nausb 6nauto 1nav 2nax 3naz 1na� n1b2 nbau5s n1c + nche5e nch5m 2n1d nda8d n2d1ak nd5ans n2d1ei nde8lac ndel6sa n8derhi + nde4se nde8stal n2dj ndnis5 n6d5or6t nd3rec nd3rot nd8samt nd6sau + ndt1h n8dumd 1ne ne5as ne2bl 6n5ebn 2nec 5neei ne5en ne1g4l 2negy + 4n1ein 8neis 4n3e4lem 8nemb 2n1emp nen1a 6n5energ nen3k 8nentb + 4n3en3th 8nentl 8n5entn 8n5ents ne1ra ne5r8al ne8ras 8nerbi 6n5erde. + nere5i6d nerfor6 6n5erh� 8nerl� 2n1err n8ers. 6n5ertra + 2n1erz nesi3e net1h neu4ra neu5sc 8neu� n1f nf5f nf2l + nflei8 nf5lin nft8st n8g5ac ng5d ng8en nge8ram ngg2 ng1h n6glic ng3rip + ng8ru ng2se4 ng2si n2g1um n1gy n8g�l n1h nhe6r5e 1ni ni1bl + ni5ch� ni8dee n6ie ni1en nie6s5te niet5h ni8etn 4n3i6gel n6ik + ni1la 2n1imp ni5na 2n1ind 8ninf 6n5inh ni8nit 6n5inn 2n1ins 4n1int + n6is + nis1tr + ni1th ni1tr n1j n6ji n8kad nk5ans n1ke n8kerla n1ki nk5inh + n5kl� n1k2n n8k5not nk3rot n8kr� nk5spo nk6t5r n8kuh + n6k�b n5l6 nli4mi n1m nmen4s n1na n8nerg nni5o n1no nn4t3ak nnt1h + nnu1e n1ny n1n� n1n� n1n� no5a no4b3la 4n3obs 2nobt + noche8 no6die no4dis no8ia no5isc 6n5o6leu no4mal noni6er 2n1onk n1ony + 4n3o4per 6nopf 6nopti no3ra no4ram nor6da 4n1org 2n1ort n6os no1st + 8nost. no8tan no8ter noty6pe 6n5ox n1p2 n1q n1r nr�s3 6ns n1sac + ns3ang n1sc n8self n8s5erf n8serg n6serk ns5erw n8sint n1s2pe n1spr + n6s5tat. 
+ + n6stob n1str n1ta n4t3a4go nt5anh nt3ark nt3art + n1te nt3eis nte5n6ar nte8nei nter3a nte6rei nt1ha nt6har n3ther nt5hie + n3thus n1ti nti1c n8tinh nti1t ntlo6b ntmen8 n1to nt3o4ti n1tr ntra5f + ntra5ut nt8rea nt3rec nt8rep n4t3rin nt8rop n4t3rot n4tr� nt1s + nts6an nt2sk n1tu nt1z n1t� n1t� n8t�l n1t� 1nu + nu1a nu5el nu5en 4n1uhr nu5ie 8numl 6n5ums 6n5umw 2n1und 6nuni 6n5unr + 2n1unt 2nup 2nu6r n5uri nu3skr nu5ta n1v 8n1w 1nys n1za n6zab n2z1ar + n6zaus nzi4ga n8zof n6z5unt n1zw n6zwir 1n�c 5n�e 5n�i + n8�l n�6m n�6re n5�rz 5n�us n1�l + 1n�t n5�z 5n�. 6n1�2b 5n�� + o5ab. oa2l o8ala o1a2m o1an ob1ac obe4ra o6berh 5o4bers o4beru + obe6ser 1obj o1bl o2bli ob5sk 3obst. ob8sta obst5re ob5sz o1che + oche8b o8chec o3chi och1l och3m ocho8f o3chro och3to o3chu och1w o1d + o2d1ag od2dr ode5i ode6n5e od1tr o5e6b o5e6der. oe8du o1ef o1e2l + o1e2p o1er. o5e8x o1fa of8fan 1offi of8fin of6f5la o5fla o1fr 8o1g + og2n o1ha o1he o6h5eis o1hi ohl1a oh1le oh4l3er 5ohm. oh2ni o1ho + oh1re oh1ru o1hu oh1w o1hy o1h� o5ia o1id. o8idi oi8dr o5ids + o5isch. oiset6 o1ism o3ist. o5i6tu o1j o1k ok2l ok3lau o8kl� + 1okta o1la old5am old5r o1le ole5in ole1r ole3u ol6gl ol2kl olk4s1 + ol8lak ol8lauf. ol6lel ol8less o1lo + ol1s ol2ster + ol6sk o1lu oly1e2 5olym + o2mab om6an o8mau ombe4 o8merz om5sp o1mu o8munt o1m� o1m� + o1na ona8m on1ax on8ent o6n5erb 8oni oni5er. on1k on6n5a6b o1no ono1c + o4nokt 1ons onts8 o1n� oo8f 1oog oo2pe oo2sa o1pa 3o4pera o3pfli + opf3lo opf3r o1pi o1pl o2pli o5p6n op8pa op6pl o1pr o3p4ter 1opti + o1p� o5p� o1q o1ra. o3rad o8radd 1oram o6rang o5ras o8rauf + or5cha or4d3a4m or8dei or8deu 1ordn or4dos o1re o5re. ore2h o8r5ein + ore5isc or6enn or8fla or8fli 1orga 5orgel. or2gl o1ri 5o6rient or8nan + or8n� o1ro or1r2h or6t5an or8tau or8tere o1rus o1ry o1r� + or1�2 o1sa osa3i 6ose o8serk o1sk o6ske o6ski os2kl os2ko os2kr + osni5e o2s1o2d o3s4per o4stam o6stau o3stra ost3re osu6 o6s5ur o5s6ze + o1ta ot3auf o6taus o1te o6terw o1th othe5u o2th1r o1ti o1to oto1a + ot1re o1tri o1tro ot1sc o3tsu ot6t5erg ot2t3h ot2t5r ot8t� o1tu + ou3e ouf1 ou5f6l o5u6gr ou5ie ou6rar ou1t6a o1v o1wa o1we o6wer. o1wi + owid6 o1wo o5wu o1xe oy5al. oy1e oy1i o5yo o1z oza2r 1o2zea ozo3is + o�8 o�5elt o�1t 3paa pa6ce 5pad pag2 1pak + pa1la pa8na8t pani5el pa4nor pan1s2 1pap pap8s pa8rei par8kr paro8n + par5o6ti part8e 5partei 3partn pas6sep pa4tha 1pau 6paug pau3sc p1b + 8p5c 4p1d 1pe 4peic pe5isc 2pek pen3k pen8to8 p8er pe1ra pere6 per5ea + per5eb pe4rem 2perr per8ran 3pers 4persi pe3r� pe4sta pet2s + p2f1ec p4fei pf1f pf2l 5pflanz pf8leg pf3lei 2pft pf3ta p1g 1ph 2ph. + 2p1haf 6phb 8phd 6p5heit ph5eme 6phg phi6e 8phk 6phn p5holl pht2 + ph3tha 4ph3the phu6 6phz pi1en pi5err pi1la pi1na 5pinse pioni8e 1pis + pi1s2k pi1th p1k pl8 5pla p2lau 4plei p3lein 2pler 6p5les 2plig p6lik + 6p5ling p2liz plo8min 6p1m p1n 1p2o 8poh 5pol po8lan poly1 po3ny po1ra + 2porn por4t3h po5r� 5poti p1pa p6p5ei ppe6la pp5f p2p1h p1pi pp1l + ppp6 pp5ren + pp1s pp2ste + p5p� pr6 3preis 1pres 2p3rig 5prinz 1prob 1prod + 5prog pro8pt pro6t5a prote5i 8pro� pr�3l 1pr�s + pr�te4 1pr�f p5schl 2pst 1p2sy p1t p8to8d pt1s 5p6ty 1pu + pu1b2 2puc pu2dr puf8fr 6p5uh pun8s pu8rei pu5s6h pu1ta p1v p3w 5py + py5l p1z p�6der p5�6m p�8nu 8p�r p�t5h + p�t1s qu6 1qui 8rabk ra6bla 3rable ra2br r1abt 6rabz ra4dan ra2dr + 5rafal ra4f3er ra5gla ra2g3n 6raha ral5am 5rald 4ralg ra8lins 2rall + ral5t 8ramei r3anal r6and ran8der ran4dr 8ranf 6ranga 5rangi ran8gli + r3angr rans5pa 8ranw r8anz. 
ra5or 6rapf ra5pl rap6s5er 2r1arb 1rarh + r1arm ra5ro 2r1art 6r1arz ra8tei ra6t5he 6ratl ra4t3ro r5atta raue4n + 6raus. r5austa rau8tel raut5s ray1 r1b rb5lass r6bler rb4lie rbon6n + r8brecht rb6s5t� r8ces r1che rch1l rch3m rch3re rch3tr rch1w 8rd + r1da r8dachs r8dap rda5ro rde5ins rdio5 r8dir rd3ost r1dr r8drau 1re. + re1ak 3reakt re3als re6am. re1as 4reben re6bl rech5a r8edi re3er + 8reff 3refl 2reh 5reha r4ei. reich6s5 8reier 6reign re5imp 4r3eina + 6r3einb 6reing 6r5einn 6reinr 4r3eins r3eint reli3e 8r5elt 6rempf + 2remt ren5a6b ren8gl r3enni 1reno 5rente 4r3enth 8rentl 4r3entw 8rentz + ren4zw re1on requi5 1rer rer4bl 6rerbs 4r3erd 8rerh� 8rerkl + 4r3erla 8rerl� 4r3erns 6r5ern� rer5o 6r5erreg r5ertr r5erwec + r5er� re2sa re8schm 2ress re5u8ni 6rewo 2r1ex r1f r8ferd rf4lie + 8r1g r8gah rge4bl rge5na rgest4 rg6ne r2gni2 r8gob r4g3ret rg8sel r1h8 + r2hy 5rhyt ri1ar ri5cha rid2g r2ie rieg4s5 ri8ei ri1el ri6ele ri1en + ri3er. ri5ers. ri6fan ri8fer ri8fr 1r2ig ri8kn ri5la rim�8 + ri1na r8inde rin4ga rin6gr 1rinn 6rinner rino1 r8insp 4rinst + ri1n� ri5o6ch ri1o2d ri3o6st 2r1ir r2is ri3sko ri8spr + + ri5sv r2it 6r5i6tal ri5tr ri6ve. 8r1j 6rk r1ke rkehrs5 r1ki r3klin + r1k2n rk3str rk4t3an rk6to r6kuh rk�4s3t r1l r5li rline5a 6r1m + r6manl rma4p r4m3aph r8minf r8mob rm5sa 2rn r1na rna8be r5ne rn2ei + r6neif r6nex r6nh rn1k r1no r6n5oc rn1sp r1n� r1n� ro6bern + 6robs ro1ch 3rock. ro5de ro1e 4rofe ro8hert 1rohr ro5id ro1in ro5isc + 6rolym r2on 6roog ro6phan r3ort ro1s2p ro5s6w ro4tau ro1tr ro6ts 5rout + r1p rpe8re rp2f r2ps r2pt r1q 2rr r1ra r1re rrer6 + rr6hos r5rh� + r1ri r1ro rro8f rr8or rror5a r1ru r3ry r1r� r1r� r1r� + 2r1s + r2ste r2sti + r6sab r4sanf rse6e rse5na r2sh r6ska r6ski rs2kl r8sko r2sl rs2p + r6stauf r8sterw r8stran rswi3d4 r2sz 2r1t rt3art r8taut r5tei rt5eige + r8tepe r4t3erh r8terla r4t3hei r5t6hu r4t3int rt5reif rt1sc rt6ser + rt6s5o rt6s5u rt5und r8turt rube6 ru1en 1r4uf ruf4st ru1ie 2r1umg + 2r1uml 2rums run8der run4d5r 6rundz 6runf 8runs 2r1unt 2r1ur r6us + ru6sta + rus1tr + ru6tr 1ruts r1v rven1 rvi2c r1w r1x r1za rz5ac r6z5al + r8z1ar r8zerd r6z5erf rz8erh rz4t3h r8zum r�4ste r�u8sc + r1�f 5r�hr r�5le 3r�ll 5r�mis r1�r + r�2sc 3r�mp 1sa. 1saa s3a4ben sa2bl 2s1abs 6s1abt 6sabw + 3sack. 6s3a4der 1saf sa1fa 4s1aff sa5fr 1sag 1sai sa1i2k1 4s1akt 1sal + sa1la 4s3alpi 6salter salz3a 1sam s5anb san2c 1sand s5angeh 6sanl + 2s1ans 6s3antr 8s1anw s1ap s6aph 8sapo sap5p6 s8ar. 2s1arb 3sarg + s1arm sa5ro 2s1art 6s1arz 1sas 1sat sat8a 2s1atl sa8tom 3s8aue s5auff + sau5i s6aur 2s1aus 5s6ause 2s1b2 2sca s4ce 8sch. 3scha. 5schade + 3schaf 3schal sch5ame 8schanc 8schb 1sche 6schef 8schex 2schf 2schg + 2schh 1schi 2schk 5schlag 5schlu 6schm�� + 6schna� 1scho 6schord 6schp 3schri 8schric 8schrig + 8schrou 6schs 2scht sch3ta sch3tr 1schu 8schunt 6schv 2schz 5sch� + 5sch� 2sco scre6 6scu 2s1d 1se se5an se1ap se6ben se5ec see5i6g + se3erl 8seff se6han se8hi se8h� 6s5eid. 2s1eig s8eil 5sein. + sei5n6e 6s5einh 3s8eit 3sel. se4lar selb4 6s3e4lem se8lerl 2s1emp + sen3ac se5nec 6s5ents 4sentz s8er. se8reim ser5inn 8serm� + 8s5erzi 6ser�f se1um 8sexa 6sexp 2s1f2 sfal8ler 2s3g2 sge5b2 s1h + s8hew 5s6hip 5s4hop 1si 2siat si1b sicht6s 6s5i6dee siege6s5 si1en + si5err si1f2 si1g2n si6g5r si8kau sik1i si4kin si2kl si8k� si1la + sil6br si1na 2s1inf sin5gh 2s1inh sinne6s5 2s1ins si5ru si5str 4s1j + s1k2 6sk. 2skau skel6c skelch5 s6kele 1s2ki. 3s4kin. 
s6kiz s8kj + 6skn 2skow 3skrib 3skrip 2sku 8sk� s1l s8lal slei3t s4low 2s1m + s1n 6sna 6snot 1so so1ch 2s1odo so4dor 6s5o4fen solo3 s2on so5of 4sope + so1ra 2s1ord 4sorga sou5c so3un 4s3ox sp2 8spaa 5spal 1span 2spap + s2pec s4peis 1spek s6perg 4spers s6pes 2s1pf 8sphi 1s2ph� 1spi + spi4e 6s5pig 6spinse 2spis 2spla 2spol 5s6pom 6s5pos 6spoti 1spra + 3s8prec 6spreis 5spring 6sprob 1spru s2pul 1s2pur 6spy 5sp�n + 1sp� s1q 2s1r + + + 2ssa 2sse 2ssi 2sso 2ss� 2ss� 2ss� 2s1sch + sse8nu ssini6s ssoi6r 2st. + 1sta 4stafe 2stag + sta3la 6stale + 4s2talg + 8stalk 8stamt 6st5anf 4stans 6stanw 6starb sta4te + 6staus 2stb 6stc 6std + s1te + 4steil + + 6steppi + + 8stesse 6stf 2stg 2sth st1ha st3hei s8t1hi st1ho st5hu + s1ti + s2ti4el + 4s2tigm + + 6s2tind + 4s2tinf + s2ti8r + 2stk 2stl 2stm + 1sto 6stoll. 4st3ope + 6stopf. 6stord 6stp + + 4strai + s3tral + 6s5traum 3stra� + 3strec 6s3tref 8streib 5streif 6streno 6stres 6strev + + 2st5rig + + 8s2t1ris + + s8troma st5rose 4struf 3strum + 6str�g 2st1s6 2stt + 1stu stu5a 4stuc 2stue 8stun. 2stv 2stw s2tyl + 6stz 1st� 8st�g + 1st� + 1st� 8st�ch 4st�r. + 1su su2b1 3suc su1e su2fe su8mar 6sumfa 8sumk 2s1unt sup1p2 6s5u6ran + 6surte 2s1v 2s1w 1sy 8syl. sy5la syn1 sy2na syne4 s1z s4zend 5s6zene. + 8szu 1s� 6s5�nd 6s�ugi 6s�u� + 5s�m 2s1�2b 1s�c s�8di 1s�n 5s�� + taats3 4tab. taba6k ta8ban tab2l ta6bre 4tabs t3absc + 8tabz 6t3acht ta6der 6tadr tad6s tad2t 1tafe4 1tag ta6ga6 ta8gei + tage4s tag6s5t tah8 tahl3 tai6ne. ta5ir. tak8ta tal3au 1tale ta8leng + tal5ert 6t5a6mer 6tamp tampe6 2t1amt tan5d6a tan8dr tands5a tani5e + 6tanl 2tanr t3ans 8t5antr tanu6 t5anw 8tanwa tan8zw ta8rau 6tarbe + 1tari 2tark 2t1arm ta1ro 2tart t3arti 6tarz ta1sc ta6sien ta8stem + ta8sto t5aufb 4taufn 8taus. 5tause 8tausf 6tausg t5ausl 2t1b2 2t1c + t6chu 2t1d te2am tea4s te8ben 5techn 4teff te4g3re te6hau 2tehe te4hel + 2t1ehr te5id. teig5l 6teign tei8gr 1teil 4teinh t5einhe 4teis t5eisen + 8teiw te8lam te4lar 4telek 8telem te6man te6n5ag ten8erw ten5k tens4p + ten8tro 4t3entw 8tentz te6pli 5teppi ter5a6b te3ral ter5au 8terbar + t5erbe. 6terben 8terbs 4t3erbt t5erde. ter5ebe ter5ein te8rers terf4 + 8terh� 6terkl� ter8nor ter6re. t8erscha t5e6sel te8stau + t3euro te1xa tex3e 8texp tex6ta 2t1f2 2t1g2 2th. th6a 5tha. 2thaa + 6t1hab 6t5haf t5hah 8thak 3thal. 6thals 6t3hand 2t1hau 1the. 3t4hea + t1heb t5heil t3heit t3helf 1theo 5therap 5therf 6t5herz 1thes 1thet + 5thi. 2t1hil t3him 8thir 3this t5hj 2th1l 2th1m th1n t5hob t5hof + 4tholz 6thopti 1thr6 4ths t1hum 1thy 4t1h� 2t1h� t1h� + ti1a2m ti1b tie6fer ti1en ti8gerz tig3l ti8kin ti5lat 1tilg t1ind + tin4k3l ti3spa ti5str 5tite ti5tr ti8vel ti8vr 2t1j 2t1k2 2t1l tl8a + 2t1m8 2t1n 3tobe 8tobj to3cha 5tocht 8tock tode4 to8del to8du to1e + 6t5o6fen to1in toi6r 5toll. to8mene t2ons 2t1ony to4per 5topf. 6topt + to1ra + to1s to2ste + to6ska tos2l 2toti to1tr t8ou 2t1p2 6t1q tr6 tra5cha + tra8far traf5t 1trag tra6gl tra6gr t3rahm 1trai t6rans tra3sc tra6st + 3traue t4re. 2trec t3rech t8reck 6t1red t8ree 4t1reg 3treib 4treif + 8t3reis 8trepo tre6t5r t3rev 4t3rez 1trib t6rick tri6er 2trig t8rink + tri6o5d trizi5 tro1a 3troc trocke6 troi8d tro8man. tro3ny 5tropf + 6t5rosa t5ro� 5trub 5trup trut5 1tr�g 6t1r�h + 5tr�b tr�3bu t1r�c t1r�s 2ts ts1ab t1sac tsa8d + ts1ak t6s5alt ts1an ts1ar ts3auf t3schr t5sch� tse6e tsee5i + tsein6s ts3ent ts1er t8serf t4serk t8sh 5t6sik t4s3int ts5ort. + t5s6por t6sprei + t1st t2ste + t6s5tanz ts1th t6stit t4s3tor 1t2sua t2s1uf + t8sum. 
t2s1u8n t2s1ur 2t1t tt5eif tte6sa tt1ha tt8ret tt1sc tt8ser + tt5s6z 1tuc tuch5a 1tu1e 6tuh t5uhr tu1i tu6it 1tumh 6t5umr 1tums + 8tumt 6tund 6tunf 2t1unt tu5ra tu6rau tu6re. tu4r3er 2t1v 2t1w 1ty1 + ty6a ty8la 8tym 6ty6o 2tz tz5al tz1an tz1ar t8zec tzeh6 tzehn5 t6z5ei. + t6zor t4z3um t6z�u 5t�g 6t�h t5�lt t8�n + t�re8 8t�8st 6t�u� t5�ffen + 8t�8k 1t�n 4t�b t6�5ber. 5t�ch 1t�r. + u3al. u5alb u5alf u3alh u5alk u3alp u3an. ua5na u3and u5ans u5ar. + ua6th u1au ua1y u2bab ubi5er. u6b5rit ubs2k u5b� u8b�b 2uc + u1che u6ch5ec u1chi uch1l uch3m uch5n uch1r uch5to ucht5re u1chu uch1w + uck1a uck5in u1d ud4a u1ei u6ela uene8 u6ep u1er uer1a ue8rerl uer5o + u8esc u2est u8ev u1fa u2f1ei u4f3ent u8ferh uf1fr uf1l uf1ra uf1re + uf1r� uf1r� uf1s2p uf1st uft1s u8gabt u8gad u6gap ugeb8 u8gn + ugo3s4 u1ha u1he u1hi uh1le u1ho uh1re u1hu uh1w u1h� u1h� + 6ui ui5en u1ig u3ins uin8tes u5isch. u1j 6uk u1ke u1ki u1kl u8klu + u1k6n u5ky u1la uld8se u1le ul8lac ul6lau ul6le6l ul6lo ulni8 u1lo + ulo6i ult6a ult8e u1lu ul2vr u1l� u1l� 3umfan 5umlau umo8f + um8pho u1mu umu8s u5m� u1n1a un2al un6at unau2 6und. 5undein + un4d3um 3undzw und�8 un8d�b une2b un1ec une2h un3eis 3unfal + 1unf� 5ungea 3ungl� ung2s1 un8g� 1u2nif un4it un8kro + unk5s u1no unpa2 uns2p unvol4 unvoll5 u5os. u1pa u1pi u1p2l u1pr + up4s3t up2t1a u1q u1ra ur5abs ura8d ur5ah u6rak ur3alt u6rana u6r5ans + u8rap ur5a6ri u8ratt u1re ur3eig ur8gri u1ri ur5ins 3urlau urmen6 + ur8nan u1ro 3ursac ur8sau ur8sei ur4sk 3urtei u1ru uru5i6 uru6r u1ry + ur2za ur6z� ur5�6m u5r� u1r� ur�ck3 u1sa + usa4gi u2s1ar u2s1au u8schec usch5wi u2s1ei use8kel u8sl u4st3a4b + us3tau + + u2s1uf u8surn ut1ac u1tal uta8m u1tan ut1ar u1tas ut1au + u1te u8teic u4tent u8terf u6terin u4t3hei ut5ho ut1hu u1ti utine5 + uti6q u1to uto5c u1tr ut1sa ut1s6p ut6stro u1tu utz5w u1u u1v uve5n + uve3r4� u1w u1xe u5ya uy5e6 u1yi u2z1eh u8zerh u5� u�e6n + u�en5e 8vanb 6vang 6varb var8d va6t5a va8tei + va2t1r 2v1b 6v5c 6vd 1ve 6ve5g6 ver1 ver5b verb8l ve2re2 verg8 ve2ru8 + ve1s ve2s3p ve3xe 2v1f 2v1g 6v5h vi6el vie6w5 vi1g4 vi8leh vil6le. + 8vint vi1ru vi1tr 2v1k 2v1l 2v1m 4v5n 8vo8f voi6le vol8lend vol8li + v2or1 vo2re vo8rin vo2ro 2v1p 8vra v6re + 2v2s + 2v1t 2v1v 4v3w 2v1z + waffe8 wa6g5n 1wah wah8n wa5la wal8din wal6ta wan4dr 5ware wa8ru + war4za 1was w5c w1d 5wech we6fl 1weg we8geng weg5h weg3l we2g1r + weh6r5er 5weise weit3r wel2t welt3r we6rat 8werc 5werdu wer4fl 5werk. + wer4ka wer8ku wer4ta wer8term we2sp + we8s4tend + + we8str + we8st� wet8ta wich6s5t 1wid wi2dr wiede4 wieder5 wik6 wim6ma + win4d3r 5wirt wisch5l 1wj 6wk 2w1l 8w1n wo1c woche6 wol6f wor6t5r 6ws2 + w1sk 6w5t 5wunde. wun6gr wu1sc wu2t1 6w5w wy5a w�rme5 w�1sc + 1xag x1ak x3a4men 8xamt x1an 8x1b x1c 1xe. x3e4g 1xen xe1ro x1erz + 1xes 8xf x1g 8x1h 1xi 8xid xi8so 4xiste x1k 6x1l x1m 8xn 1xo 8x5o6d + 8x3p2 x1r x1s6 8x1t x6tak x8terf x2t1h 1xu xu1e x5ul 6x3w x1z 5ya. + y5an. y5ank y1b y1c y6cha y4chia y1d yen6n y5ern y1g y5h y5in y1j + y1k2 y1lak yl1al yla8m y5lax y1le y1lo y5lu y8mn ym1p2 y3mu y1na yno2d + yn1t y1on. y1o4p y5ou ypo1 y1pr y8ps y1r yri3e yr1r2 + + ys5iat ys8ty + y1t y3w y1z y�8m z5a6b zab5l 8za6d 1zah za5is 4z3ak 6z1am 5zange. + 8zanl 2z1ara 6z5as z5auf 3zaun 2z1b 6z1c 6z1d 1ze ze4dik 4z3eff 8zein + zei4ta zei8ters ze6la ze8lec zel8th 4zemp 6z5engel zen8zin 8zerg� + zer8i ze1ro zers8 zerta8 zer8tab zer8tag 8zerz ze8ste zeu6gr 2z1ex + 2z1f8 z1g 4z1h 1zi zi1en zi5es. 
4z3imp zi1na 6z5inf 6z5inni zin6s5er + 8zinsuf zist5r zi5th zi1tr 6z1j 2z1k 2z1l 2z1m 6z1n 1zo zo6gl 4z3oh + zo1on zor6na8 4z1p z5q 6z1r 2z1s8 2z1t z4t3end z4t3hei z8thi 1zu zu3al + zu1b4 zu1f2 6z5uhr zun2a 8zunem zunf8 8zungl zu1o zup8fi zu1s8 zu1z + 2z1v zw8 z1wal 5zweck zwei3s z1wel z1wer z6werg 8z5wes 1zwi zwi1s + 6z1wo 1zy 2z1z zz8a zzi1s 1z� 1z� 6z�l. z�1le + 1z� 2z1�2b �1a6 �b1l �1che �3chi + �ch8sc �ch8sp �5chu �ck5a �d1a �d5era + �6d5ia �1e �5fa �f1l �ft6s �g1h + �g3le �6g5nan �g5str �1he �1hi �h1le + �h5ne 1�hnl �h1re �h5ri �h1ru �1hu + �h1w 6�i �1isc �6ische �5ism �5j + �1k �l1c �1le �8lei �l6schl �mi1e + �m8n �m8s �5na 5�nderu �ne5i8 �ng3l + �nk5l �1no �n6s5c �1pa �p6s5c 3�q + �r1c �1re �re8m 5�rgern �r6gl �1ri + 3�rmel �1ro �rt6s5 �1ru 3�rztl �5r� + �6s5chen �sen8s �s1th �ta8b �1te �teri4 + �ter5it �6thy �1ti 3�tk �1to �t8schl + �ts1p �5tu �ub1l �u1e 1�ug �u8ga + �u5i �1um. �1us. 1�u� �1z + �1b �1che �5chi + �ch8s2tei + �ch8str �cht6 + 5�6dem 5�ffn �1he �h1l8 �h1re �1hu + �1is �1ke 1�2ko 1�l. �l6k5l �l8pl + �1mu �5na �nig6s3 �1no �5o6t �pf3l + �p6s5c �1re �r8gli �1ri �r8tr �1ru + 5�sterr �1te �5th �1ti �1tu �1v �1w + �we8 �2z �b6e2 3�4ber1 �b1l �b1r + 5�2bu �1che �1chi �8ch3l �ch6s5c �8ck + �ck1a �ck5ers �d1a2 �6deu �di8t �2d1o4 + �d5s6 �ge4l5a �g1l �h5a �1he �8heh + �6h5erk �h1le �h1re �h1ru �1hu �h1w + �3k �1le �l4l5a �l8lo �l4ps �l6s5c + �1lu �n8da �n8fei �nk5l �n8za �n6zw + �5pi �1re �8rei �r8fl �r8fr �r8geng + �1ri �1ro �r8sta + + �1ru �se8n + �8sta �8stes + + �3ta �1te �1ti + �t8tr �1tu �t8zei �1v �1a8 5�a. + �8as �1b8 �1c �1d + 1�e �5ec 8�e8g 8�e8h + 2�1ei 8�em �1f8 �1g �1h + 1�i �1k �1l �1m + + �1n �1o �1p8 �5q + �1r �1s2 �st8 �1ta + �1te �t3hei �1ti �5to + �1tr 1�u8 6�5um �1v �1w + �1z + + 2s1ta. + i2s1tal + 2s1tani 2s1tan. + fe2s1ta + te2s1ta + + nd2ste + ve2ste + 3s2tec + 4s3techn + 3s2teg + 3s2teh + 3s2tein 3s2teig 3s2teif + 3s2tell 3s2telz + a4s3tel + 3s2temm + 3s2temp + 3s2tep + s3s2ter t3s2tern + 3s2teue + 6s4teuro + + bs2ti + te2s3ti + ve2sti + 3s2tic + + 3s2tieb + 3s2tieg + + 3s2tif + 3s2til + 3s2tim + 3s2tink + 3s2titu + + a2s1to + gu2s1to + ku2s1to + i2s1tol i2s1tor + ve2s1to + + 2s1tung + 2s7tus + o2s1tul + + + + aus3s4 + ens3s4 + gs3s4 + .mis2s1 + s2s1b8 + + s2s3chen + s2s3d + s2s5ec + + + 2s2s1ei + s2s3f + s2s1g + s2s3h + s2s3k + s2s3l + s2s3m + + s2s3n + s2s3p8 + s2s5q + s2s3r + s2s3s2 + sss2t8 + + + as2s3te + is2s3te + us2s3te + �s2s3te + s2st3hei + s2s3ti + s2s1to + s2s1tr + + 6ss5um + s2s3v + s2s3w + s2s3z + + + + 1cker. + 1ckert + 1ckad + 1cke. 
+ 1ckel + 1cken + 4ck1ent + 1ckere + 1ckern + 1ckeru + 1ckie + 1ckig + 1ckun + + + diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json index e69c2db6ff400..58e0ea5fd9fc5 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json @@ -21,7 +21,52 @@ "dict_dec":{ "type":"dictionary_decompounder", "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"] - } + }, + "hyphenation_dec_only_longest_match": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "only_longest_match": true + }, + "hyphenation_dec_no_sub_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_sub_matches": true + }, + "hyphenation_dec_no_overlapping_matches": { + "type": "hyphenation_decompounder", + "hyphenation_patterns_path": "de_DR.xml", + "word_list": [ + "fuss", + "fussball", + "ballpumpe", + "ball", + "pumpe", + "kaffee", + "fee", + "maschine" + ], + "no_overlapping_matches": true + } }, "analyzer":{ "standard":{ @@ -47,6 +92,18 @@ "decompoundingAnalyzer":{ "tokenizer":"standard", "filter":["dict_dec"] + }, + "hyphenationDecompoundingAnalyzerOnlyLongestMatch":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_only_longest_match"] + }, + "hyphenationDecompoundingAnalyzerNoSubMatches": { + "tokenizer":"standard", + "filter":["hyphenation_dec_no_sub_matches"] + }, + "hyphenationDecompoundingAnalyzerNoOverlappingMatches":{ + "tokenizer":"standard", + "filter":["hyphenation_dec_no_overlapping_matches"] } } } diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml index 82f933296a314..095b27e0fa071 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml @@ -15,6 +15,21 @@ index : dict_dec : type : dictionary_decompounder word_list : [donau, dampf, schiff, spargel, creme, suppe] + hyphenation_dec_only_longest_match : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + only_longest_match : true + hyphenation_dec_no_sub_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_sub_matches : true + hyphenation_dec_no_overlapping_matches : + type : hyphenation_decompounder + hyphenation_patterns_path : de_DR.xml + word_list : [fuss, fussball, ballpumpe, ball, pumpe, kaffee, fee, maschine] + no_overlapping_matches: true analyzer : standard : type : standard @@ -37,3 +52,13 @@ index : decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] + hyphenationDecompoundingAnalyzerOnlyLongestMatch : + tokenizer : standard + filter : [hyphenation_dec_only_longest_match] + hyphenationDecompoundingAnalyzerNoSubMatches: + tokenizer: standard + filter : [hyphenation_dec_no_sub_matches] + 
hyphenationDecompoundingAnalyzerNoOverlappingMatches: + tokenizer: standard + filter : [hyphenation_dec_no_overlapping_matches] + From c72d5fdf1cce46763d7b820b2eccf78d741ede04 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 18 Nov 2024 16:45:40 +0000 Subject: [PATCH 018/386] Revert "Index stats enhancement: creation date and tier_preference (#116339)" (#116959) This reverts commit e0af1238fcdf44ebe838b1da188c8fb46a915ea1. --- docs/changelog/116339.yaml | 5 -- .../16_creation_date_tier_preference.yml | 14 ------ .../org/elasticsearch/TransportVersions.java | 1 + .../admin/indices/stats/IndexStats.java | 42 ++-------------- .../indices/stats/IndicesStatsFeatures.java | 2 +- .../indices/stats/IndicesStatsResponse.java | 48 +++++-------------- .../core/ilm/WaitForNoFollowersStepTests.java | 7 +-- .../IndicesStatsMonitoringDocTests.java | 2 - 8 files changed, 21 insertions(+), 100 deletions(-) delete mode 100644 docs/changelog/116339.yaml delete mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml diff --git a/docs/changelog/116339.yaml b/docs/changelog/116339.yaml deleted file mode 100644 index 1767183271812..0000000000000 --- a/docs/changelog/116339.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 116339 -summary: "Index stats enhancement: creation date and `tier_preference`" -area: Stats -type: feature -issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml deleted file mode 100644 index 6ecd9c3e9c2ce..0000000000000 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/16_creation_date_tier_preference.yml +++ /dev/null @@ -1,14 +0,0 @@ ---- -"Ensure index creation date and tier preference are exposed": - - requires: - cluster_features: ["stats.tier_creation_date"] - reason: index creation date and tier preference added to stats in 8.17 - - - do: - indices.create: - index: myindex - - do: - indices.stats: {} - - - is_true: indices.myindex.creation_date - - is_true: indices.myindex.tier_preference diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5b5d12d738194..aadfffb562558 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -200,6 +200,7 @@ static TransportVersion def(int id) { public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_00_0); public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); + public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java index 7cefc086e17dc..5bdecd10075e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java @@ -22,7 +22,8 @@ public class IndexStats implements Iterable { - public static final NodeFeature TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date"); + // feature was effectively reverted but we still need to keep this constant around + public static final NodeFeature REVERTED_TIER_CREATION_DATE = new NodeFeature("stats.tier_creation_date"); private final String index; @@ -32,10 +33,6 @@ public class IndexStats implements Iterable { private final IndexMetadata.State state; - private final List tierPreference; - - private final Long creationDate; - private final ShardStats shards[]; public IndexStats( @@ -43,16 +40,12 @@ public IndexStats( String uuid, @Nullable ClusterHealthStatus health, @Nullable IndexMetadata.State state, - @Nullable List tierPreference, - @Nullable Long creationDate, ShardStats[] shards ) { this.index = index; this.uuid = uuid; this.health = health; this.state = state; - this.tierPreference = tierPreference; - this.creationDate = creationDate; this.shards = shards; } @@ -72,14 +65,6 @@ public IndexMetadata.State getState() { return state; } - public List getTierPreference() { - return tierPreference; - } - - public Long getCreationDate() { - return creationDate; - } - public ShardStats[] getShards() { return this.shards; } @@ -148,24 +133,13 @@ public static class IndexStatsBuilder { private final String uuid; private final ClusterHealthStatus health; private final IndexMetadata.State state; - private final List tierPreference; - private final Long creationDate; private final List shards = new ArrayList<>(); - public IndexStatsBuilder( - String indexName, - String uuid, - @Nullable ClusterHealthStatus health, - @Nullable IndexMetadata.State state, - @Nullable List tierPreference, - @Nullable Long creationDate - ) { + public IndexStatsBuilder(String indexName, String uuid, @Nullable ClusterHealthStatus health, @Nullable IndexMetadata.State state) { this.indexName = indexName; this.uuid = uuid; this.health = health; this.state = state; - this.tierPreference = tierPreference; - this.creationDate = creationDate; } public IndexStatsBuilder add(ShardStats shardStats) { @@ -174,15 +148,7 @@ public IndexStatsBuilder add(ShardStats shardStats) { } public IndexStats build() { - return new IndexStats( - indexName, - uuid, - health, - state, - tierPreference, - creationDate, - shards.toArray(new ShardStats[shards.size()]) - ); + return new IndexStats(indexName, uuid, health, state, shards.toArray(new ShardStats[shards.size()])); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java index 2b67885273d05..558343db1023a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsFeatures.java @@ -18,6 +18,6 @@ public class IndicesStatsFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(IndexStats.TIER_CREATION_DATE); + return 
Set.of(IndexStats.REVERTED_TIER_CREATION_DATE); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 205f1cbc04e8b..91e0e7cbc1dff 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -47,10 +47,6 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { private final Map indexStateMap; - private final Map> indexTierPreferenceMap; - - private final Map indexCreationDateMap; - private final ShardStats[] shards; private Map shardStatsMap; @@ -58,23 +54,22 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { IndicesStatsResponse(StreamInput in) throws IOException { super(in); shards = in.readArray(ShardStats::new, ShardStats[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); indexStateMap = in.readMap(IndexMetadata.State::readFrom); - indexTierPreferenceMap = in.readMap(StreamInput::readStringCollectionAsList); - indexCreationDateMap = in.readMap(StreamInput::readLong); + } else if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); + indexStateMap = in.readMap(IndexMetadata.State::readFrom); + in.readMap(StreamInput::readStringCollectionAsList); // unused, reverted + in.readMap(StreamInput::readLong); // unused, reverted } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { // Between 8.1 and INDEX_STATS_ADDITIONAL_FIELDS, we had a different format for the response // where we only had health and state available. 
indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); indexStateMap = in.readMap(IndexMetadata.State::readFrom); - indexTierPreferenceMap = Map.of(); - indexCreationDateMap = Map.of(); } else { indexHealthMap = Map.of(); indexStateMap = Map.of(); - indexTierPreferenceMap = Map.of(); - indexCreationDateMap = Map.of(); } } @@ -94,8 +89,6 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { Objects.requireNonNull(shards); Map indexHealthModifiableMap = new HashMap<>(); Map indexStateModifiableMap = new HashMap<>(); - Map> indexTierPreferenceModifiableMap = new HashMap<>(); - Map indexCreationDateModifiableMap = new HashMap<>(); for (ShardStats shard : shards) { Index index = shard.getShardRouting().index(); IndexMetadata indexMetadata = metadata.index(index); @@ -105,14 +98,10 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { ignored -> new ClusterIndexHealth(indexMetadata, routingTable.index(index)).getStatus() ); indexStateModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getState()); - indexTierPreferenceModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getTierPreference()); - indexCreationDateModifiableMap.computeIfAbsent(index.getName(), ignored -> indexMetadata.getCreationDate()); } } indexHealthMap = unmodifiableMap(indexHealthModifiableMap); indexStateMap = unmodifiableMap(indexStateModifiableMap); - indexTierPreferenceMap = unmodifiableMap(indexTierPreferenceModifiableMap); - indexCreationDateMap = unmodifiableMap(indexCreationDateModifiableMap); } public Map asMap() { @@ -150,14 +139,7 @@ public Map getIndices() { Index index = shard.getShardRouting().index(); IndexStatsBuilder indexStatsBuilder = indexToIndexStatsBuilder.computeIfAbsent( index.getName(), - k -> new IndexStatsBuilder( - k, - index.getUUID(), - indexHealthMap.get(index.getName()), - indexStateMap.get(index.getName()), - indexTierPreferenceMap.get(index.getName()), - indexCreationDateMap.get(index.getName()) - ) + k -> new IndexStatsBuilder(k, index.getUUID(), indexHealthMap.get(index.getName()), indexStateMap.get(index.getName())) ); indexStatsBuilder.add(shard); } @@ -202,12 +184,14 @@ public CommonStats getPrimaries() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeArray(shards); - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { out.writeMap(indexHealthMap, StreamOutput::writeWriteable); out.writeMap(indexStateMap, StreamOutput::writeWriteable); - out.writeMap(indexTierPreferenceMap, StreamOutput::writeStringCollection); - out.writeMap(indexCreationDateMap, StreamOutput::writeLong); - + } else if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { + out.writeMap(indexHealthMap, StreamOutput::writeWriteable); + out.writeMap(indexStateMap, StreamOutput::writeWriteable); + out.writeMap(Map.of(), StreamOutput::writeStringCollection); + out.writeMap(Map.of(), StreamOutput::writeLong); } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { out.writeMap(indexHealthMap, StreamOutput::writeWriteable); out.writeMap(indexStateMap, StreamOutput::writeWriteable); @@ -237,12 +221,6 @@ protected Iterator customXContentChunks(ToXContent.Params params) { if (indexStats.getState() != null) { builder.field("status", indexStats.getState().toString().toLowerCase(Locale.ROOT)); } - if 
(indexStats.getTierPreference() != null) { - builder.field("tier_preference", indexStats.getTierPreference()); - } - if (indexStats.getCreationDate() != null) { - builder.field("creation_date", indexStats.getCreationDate()); - } builder.startObject("primaries"); indexStats.getPrimaries().toXContent(builder, p); builder.endObject(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java index 21e3155501995..01a12fb795316 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForNoFollowersStepTests.java @@ -189,10 +189,7 @@ public void testNoShardStats() { shardStats[0] = sStats; mockXPackInfo(true, true); - mockIndexStatsCall( - indexName, - new IndexStats(indexName, "uuid", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, null, null, shardStats) - ); + mockIndexStatsCall(indexName, new IndexStats(indexName, "uuid", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, shardStats)); final SetOnce conditionMetHolder = new SetOnce<>(); final SetOnce stepInfoHolder = new SetOnce<>(); @@ -292,7 +289,7 @@ private IndexStats randomIndexStats(boolean isLeaderIndex, int numOfShards) { for (int i = 0; i < numOfShards; i++) { shardStats[i] = randomShardStats(isLeaderIndex); } - return new IndexStats(randomAlphaOfLength(5), randomAlphaOfLength(10), null, null, null, null, shardStats); + return new IndexStats(randomAlphaOfLength(5), randomAlphaOfLength(10), null, null, shardStats); } private ShardStats randomShardStats(boolean isLeaderIndex) { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java index b1d4f3ff7045f..6822f54633bdc 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java @@ -53,8 +53,6 @@ public void setUp() throws Exception { "dcvO5uZATE-EhIKc3tk9Bg", null, null, - null, - null, new ShardStats[] { // Primaries new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null, null, false, 0), From 82c02de9141bad3bfc3ebd44a9c3abaa925e649d Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 18 Nov 2024 12:27:05 -0500 Subject: [PATCH 019/386] Fixing MultiDenseVectorScriptDocValuesTests tests (#116940) This fixes two test issues: - 1. Now the tests skip if the multi_dense_vector feature isn't enabled - 2. fixes silly bwc testing where we were testing for big-endian floats, which aren't possible. 
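The skip uses the standard JUnit class-level assumption; a minimal sketch of the
pattern (the real hook is the @BeforeClass setup added to
MultiDenseVectorScriptDocValuesTests in the diff below):

    import org.junit.BeforeClass;

    public class MultiDenseVectorScriptDocValuesTests extends ESTestCase {
        @BeforeClass
        public static void setup() {
            // assumeTrue() marks every test in this class as skipped (not failed)
            // when the multi-dense-vector feature flag is disabled
            assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled());
        }
    }

Because the assumption runs once in @BeforeClass, clusters without the feature
report the whole class as an assumption violation rather than a test failure.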
closes: https://github.com/elastic/elasticsearch/issues/116862 closes: https://github.com/elastic/elasticsearch/issues/116863 --- muted-tests.yml | 3 - .../MultiDenseVectorScriptDocValuesTests.java | 72 +++++++++---------- 2 files changed, 36 insertions(+), 39 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4382b133522c6..8696a1bddd3d4 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -234,9 +234,6 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests - method: testFloatGetVectorValueAndGetMagnitude - issue: https://github.com/elastic/elasticsearch/issues/116863 - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/116542 diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java index ef316c5addefa..435baa477e740 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/MultiDenseVectorScriptDocValuesTests.java @@ -18,46 +18,48 @@ import org.elasticsearch.script.field.vectors.MultiDenseVector; import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; +import org.junit.BeforeClass; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.Iterator; -import java.util.List; import static org.hamcrest.Matchers.containsString; public class MultiDenseVectorScriptDocValuesTests extends ESTestCase { + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + public void testFloatGetVectorValueAndGetMagnitude() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - for (IndexVersion indexVersion : List.of(IndexVersionUtils.randomCompatibleVersion(random()), IndexVersion.current())) { - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); - BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); - MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( - docValues, - magnitudeValues, - "test", - ElementType.FLOAT, - dims - ); - MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); - for (int i = 0; i < vectors.length; i++) { - field.setNextDocId(i); - assertEquals(vectors[i].length, field.size()); - assertEquals(dims, scriptDocValues.dims()); - Iterator iterator = scriptDocValues.getVectorValues(); - float[] magnitudes = scriptDocValues.getMagnitudes(); - assertEquals(expectedMagnitudes[i].length, magnitudes.length); - for (int j = 0; j < vectors[i].length; j++) { - assertTrue(iterator.hasNext()); - assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); - assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); - } + BinaryDocValues 
docValues = wrap(vectors, ElementType.FLOAT); + BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); + MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( + docValues, + magnitudeValues, + "test", + ElementType.FLOAT, + dims + ); + MultiDenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + assertEquals(vectors[i].length, field.size()); + assertEquals(dims, scriptDocValues.dims()); + Iterator iterator = scriptDocValues.getVectorValues(); + float[] magnitudes = scriptDocValues.getMagnitudes(); + assertEquals(expectedMagnitudes[i].length, magnitudes.length); + for (int j = 0; j < vectors[i].length; j++) { + assertTrue(iterator.hasNext()); + assertArrayEquals(vectors[i][j], iterator.next(), 0.0001f); + assertEquals(expectedMagnitudes[i][j], magnitudes[j], 0.0001f); } } } @@ -67,7 +69,7 @@ public void testByteGetVectorValueAndGetMagnitude() throws IOException { float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] expectedMagnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(expectedMagnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -94,10 +96,9 @@ public void testByteGetVectorValueAndGetMagnitude() throws IOException { public void testFloatMetadataAndIterator() throws IOException { int dims = 3; - IndexVersion indexVersion = IndexVersion.current(); float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.FLOAT), fill(new float[2][dims], ElementType.FLOAT) }; float[][] magnitudes = new float[][] { new float[3], new float[2] }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( @@ -123,10 +124,9 @@ public void testFloatMetadataAndIterator() throws IOException { public void testByteMetadataAndIterator() throws IOException { int dims = 3; - IndexVersion indexVersion = IndexVersion.current(); float[][][] vectors = new float[][][] { fill(new float[3][dims], ElementType.BYTE), fill(new float[2][dims], ElementType.BYTE) }; float[][] magnitudes = new float[][] { new float[3], new float[2] }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, indexVersion); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -162,7 +162,7 @@ public void testFloatMissingValues() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( docValues, @@ -186,7 +186,7 @@ public void testByteMissingValues() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } 
}, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -210,7 +210,7 @@ public void testFloatGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new FloatMultiDenseVectorDocValuesField( docValues, @@ -236,7 +236,7 @@ public void testByteGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][][] vectors = { { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }, { { 1, 0, 2 } } }; float[][] magnitudes = { { 1.7320f, 2.4495f, 3.3166f }, { 2.2361f } }; - BinaryDocValues docValues = wrap(vectors, ElementType.BYTE, IndexVersion.current()); + BinaryDocValues docValues = wrap(vectors, ElementType.BYTE); BinaryDocValues magnitudeValues = wrap(magnitudes); MultiDenseVectorDocValuesField field = new ByteMultiDenseVectorDocValuesField( docValues, @@ -306,7 +306,7 @@ public long cost() { }; } - public static BinaryDocValues wrap(float[][][] vectors, ElementType elementType, IndexVersion indexVersion) { + public static BinaryDocValues wrap(float[][][] vectors, ElementType elementType) { return new BinaryDocValues() { int idx = -1; int maxIdx = vectors.length; @@ -316,7 +316,7 @@ public BytesRef binaryValue() { if (idx >= maxIdx) { throw new IllegalStateException("max index exceeded"); } - return mockEncodeDenseVector(vectors[idx], elementType, indexVersion); + return mockEncodeDenseVector(vectors[idx], elementType, IndexVersion.current()); } @Override From 29bdae1618833b666dea92d5811b86b8035f90c3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 18 Nov 2024 17:34:15 +0000 Subject: [PATCH 020/386] Split searchable snapshot into multiple repo operations (#116918) Each operation on a snapshot repository uses the same `Repository`, `BlobStore`, etc. instances throughout, in order to avoid the complexity arising from handling metadata updates that occur while an operation is running. Today we model the entire lifetime of a searchable snapshot shard as a single repository operation since there should be no metadata updates that matter in this context (other than those that are handled dynamically via other mechanisms) and some metadata updates might be positively harmful to a searchable snapshot shard. It turns out that there are some undocumented legacy settings which _do_ matter to searchable snapshots, and which are still in use, so with this commit we move to a finer-grained model of repository operations within a searchable snapshot. 
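The shape of the change is to replace a repository instance captured for the
shard's lifetime with a supplier that re-resolves it on each use. A simplified
sketch of that idea (PerOperationRepository is an illustrative name only; the
production RepositorySupplier and BlobContainerSupplier below also match
repositories by UUID, cache the last resolved container, and add rate limiting):

    import java.util.function.Supplier;
    import org.elasticsearch.repositories.RepositoriesService;
    import org.elasticsearch.repositories.blobstore.BlobStoreRepository;

    class PerOperationRepository implements Supplier<BlobStoreRepository> {
        private final RepositoriesService repositoriesService;
        private final String repositoryName;

        PerOperationRepository(RepositoriesService repositoriesService, String repositoryName) {
            this.repositoriesService = repositoriesService;
            this.repositoryName = repositoryName;
        }

        @Override
        public BlobStoreRepository get() {
            // Look the repository up again for every operation so that repository
            // metadata updates (for example rotated credentials) are observed by
            // the next read instead of being pinned for the shard's lifetime.
            return (BlobStoreRepository) repositoriesService.repository(repositoryName);
        }
    }

The new S3 REST tests below exercise exactly this behaviour: they rotate the
blob store credentials under a mounted searchable snapshot and verify that
searches fail with the stale credentials and succeed again once the client,
keystore, or repository metadata is refreshed.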
--- docs/changelog/116918.yaml | 5 + .../cluster/metadata/RepositoryMetadata.java | 6 +- .../repositories/RepositoriesService.java | 14 +- .../blobstore/BlobStoreRepository.java | 6 +- ...earchableSnapshotsCredentialsReloadIT.java | 281 ++++++++++++++++++ .../SearchableSnapshots.java | 4 +- .../store/BlobContainerSupplier.java | 95 ++++++ .../store/RepositorySupplier.java | 83 ++++++ .../store/SearchableSnapshotDirectory.java | 90 ++---- 9 files changed, 505 insertions(+), 79 deletions(-) create mode 100644 docs/changelog/116918.yaml create mode 100644 x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java create mode 100644 x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java create mode 100644 x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java diff --git a/docs/changelog/116918.yaml b/docs/changelog/116918.yaml new file mode 100644 index 0000000000000..3b04b4ae4a69a --- /dev/null +++ b/docs/changelog/116918.yaml @@ -0,0 +1,5 @@ +pr: 116918 +summary: Split searchable snapshot into multiple repo operations +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java index 9b3abf38c519b..0b9c359006b23 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java @@ -46,7 +46,11 @@ public class RepositoryMetadata implements Writeable { * @param settings repository settings */ public RepositoryMetadata(String name, String type, Settings settings) { - this(name, RepositoryData.MISSING_UUID, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); + this(name, RepositoryData.MISSING_UUID, type, settings); + } + + public RepositoryMetadata(String name, String uuid, String type, Settings settings) { + this(name, uuid, type, settings, RepositoryData.UNKNOWN_REPO_GEN, RepositoryData.EMPTY_REPO_GEN); } public RepositoryMetadata(RepositoryMetadata metadata, long generation, long pendingGeneration) { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 2b95fbc69199f..1c4b7cfdab4ef 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -283,12 +283,22 @@ public RegisterRepositoryTask(final RepositoriesService repositoriesService, fin @Override public ClusterState execute(ClusterState currentState) { - RepositoryMetadata newRepositoryMetadata = new RepositoryMetadata(request.name(), request.type(), request.settings()); Metadata.Builder mdBuilder = Metadata.builder(currentState.metadata()); RepositoriesMetadata repositories = RepositoriesMetadata.get(currentState); List repositoriesMetadata = new ArrayList<>(repositories.repositories().size() + 1); for (RepositoryMetadata repositoryMetadata : repositories.repositories()) { - if (repositoryMetadata.name().equals(newRepositoryMetadata.name())) { + if (repositoryMetadata.name().equals(request.name())) { + final RepositoryMetadata newRepositoryMetadata = new 
RepositoryMetadata( + request.name(), + // Copy the UUID from the existing instance rather than resetting it back to MISSING_UUID which would force us to + // re-read the RepositoryData to get it again. In principle the new RepositoryMetadata might point to a different + // underlying repository at this point, but if so that'll cause things to fail in clear ways and eventually (before + // writing anything) we'll read the RepositoryData again and update the UUID in the RepositoryMetadata to match. See + // also #109936. + repositoryMetadata.uuid(), + request.type(), + request.settings() + ); Repository existing = repositoriesService.repositories.get(request.name()); if (existing == null) { existing = repositoriesService.internalRepositories.get(request.name()); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 8c847da344fe5..f1c3d82b74cab 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -205,6 +205,8 @@ private class ShutdownLogger { public static final String STATELESS_SHARD_WRITE_THREAD_NAME = "stateless_shard_write"; public static final String STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME = "stateless_cluster_state"; public static final String STATELESS_SHARD_PREWARMING_THREAD_NAME = "stateless_prewarm"; + public static final String SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME = "searchable_snapshots_cache_fetch_async"; + public static final String SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME = "searchable_snapshots_cache_prewarming"; /** * Prefix for the name of the root {@link RepositoryData} blob. @@ -2188,7 +2190,9 @@ private void assertSnapshotOrStatelessPermittedThreadPool() { STATELESS_TRANSLOG_THREAD_NAME, STATELESS_SHARD_WRITE_THREAD_NAME, STATELESS_CLUSTER_STATE_READ_WRITE_THREAD_NAME, - STATELESS_SHARD_PREWARMING_THREAD_NAME + STATELESS_SHARD_PREWARMING_THREAD_NAME, + SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME, + SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME ); } diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java new file mode 100644 index 0000000000000..3049fe830e728 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java @@ -0,0 +1,281 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.s3; + +import fixture.s3.S3HttpFixture; +import io.netty.handler.codec.http.HttpMethod; + +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.MutableSettingsProvider; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.function.UnaryOperator; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.allOf; + +public class S3SearchableSnapshotsCredentialsReloadIT extends ESRestTestCase { + + private static final String BUCKET = "S3SearchableSnapshotsCredentialsReloadIT-bucket"; + private static final String BASE_PATH = "S3SearchableSnapshotsCredentialsReloadIT-base-path"; + + public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored"); + + private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .keystore(keystoreSettings) + .setting("xpack.searchable.snapshot.shared_cache.size", "4kB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "4kB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .systemProperty("es.allow_insecure_settings", "true") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + public void skipFips() { + assumeFalse("getting these tests to run in a FIPS JVM is kinda fiddly and we don't really need the extra coverage", inFipsJvm()); + } + + public void testReloadCredentialsFromKeystore() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.putRepository(); + + // Set up initial credentials + final String accessKey1 = randomIdentifier(); + s3Fixture.setAccessKey(accessKey1); + keystoreSettings.put("s3.client.default.access_key", accessKey1); + keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + final String accessKey2 = 
randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Set up refreshed credentials + logger.info("--> update keystore contents"); + keystoreSettings.put("s3.client.default.access_key", accessKey2); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + public void testReloadCredentialsFromAlternativeClient() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.putRepository(); + + // Set up credentials + final String accessKey1 = randomIdentifier(); + final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + final String alternativeClient = randomValueOtherThan("default", ESTestCase::randomIdentifier); + + s3Fixture.setAccessKey(accessKey1); + keystoreSettings.put("s3.client.default.access_key", accessKey1); + keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); + keystoreSettings.put("s3.client." + alternativeClient + ".access_key", accessKey2); + keystoreSettings.put("s3.client." + alternativeClient + ".secret_key", randomIdentifier()); + cluster.updateStoredSecureSettings(); + assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Adjust repository to use new client + logger.info("--> update repository metadata"); + testHarness.putRepository(b -> b.put("client", alternativeClient)); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + public void testReloadCredentialsFromMetadata() throws IOException { + final TestHarness testHarness = new TestHarness(); + testHarness.warningsHandler = WarningsHandler.PERMISSIVE; + + // Set up credentials + final String accessKey1 = randomIdentifier(); + final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + + testHarness.putRepository(b -> b.put("access_key", accessKey1).put("secret_key", randomIdentifier())); + s3Fixture.setAccessKey(accessKey1); + + testHarness.createFrozenSearchableSnapshotIndex(); + + // Verify searchable snapshot functionality + testHarness.ensureSearchSuccess(); + + // Rotate credentials in blob store + logger.info("--> rotate credentials"); + s3Fixture.setAccessKey(accessKey2); + + // Ensure searchable snapshot now does not work due to invalid credentials + logger.info("--> expect failure"); + testHarness.ensureSearchFailure(); + + // Adjust repository to use new client + logger.info("--> update repository metadata"); + testHarness.putRepository(b -> b.put("access_key", accessKey2).put("secret_key", randomIdentifier())); + + // Check access using refreshed credentials + logger.info("--> expect success"); + testHarness.ensureSearchSuccess(); + } + + private class TestHarness { 
+ private final String mountedIndexName = randomIdentifier(); + private final String repositoryName = randomIdentifier(); + + @Nullable // to use the default + WarningsHandler warningsHandler; + + void putRepository() throws IOException { + putRepository(UnaryOperator.identity()); + } + + void putRepository(UnaryOperator settingsOperator) throws IOException { + // Register repository + final Request request = newXContentRequest( + HttpMethod.PUT, + "/_snapshot/" + repositoryName, + (b, p) -> b.field("type", "s3") + .startObject("settings") + .value( + settingsOperator.apply( + Settings.builder().put("bucket", BUCKET).put("base_path", BASE_PATH).put("endpoint", s3Fixture.getAddress()) + ).build() + ) + .endObject() + ); + request.addParameter("verify", "false"); // because we don't have access to the blob store yet + request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler)); + assertOK(client().performRequest(request)); + } + + void createFrozenSearchableSnapshotIndex() throws IOException { + // Create an index, large enough that its data is not all captured in the file headers + final String indexName = randomValueOtherThan(mountedIndexName, ESTestCase::randomIdentifier); + createIndex(indexName, indexSettings(1, 0).build()); + try (var bodyStream = new ByteArrayOutputStream()) { + for (int i = 0; i < 1024; i++) { + try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) { + bodyLineBuilder.startObject().startObject("index").endObject().endObject(); + } + bodyStream.write(0x0a); + try (XContentBuilder bodyLineBuilder = new XContentBuilder(XContentType.JSON.xContent(), bodyStream)) { + bodyLineBuilder.startObject().field("foo", "bar").endObject(); + } + bodyStream.write(0x0a); + } + bodyStream.flush(); + final Request request = new Request("PUT", indexName + "/_bulk"); + request.setEntity(new ByteArrayEntity(bodyStream.toByteArray(), ContentType.APPLICATION_JSON)); + client().performRequest(request); + } + + // Take a snapshot and delete the original index + final String snapshotName = randomIdentifier(); + final Request createSnapshotRequest = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repositoryName + '/' + snapshotName); + createSnapshotRequest.addParameter("wait_for_completion", "true"); + createSnapshotRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warningsHandler)); + assertOK(client().performRequest(createSnapshotRequest)); + + deleteIndex(indexName); + + // Mount the snapshotted index as a searchable snapshot + final Request mountRequest = newXContentRequest( + HttpMethod.POST, + "/_snapshot/" + repositoryName + "/" + snapshotName + "/_mount", + (b, p) -> b.field("index", indexName).field("renamed_index", mountedIndexName) + ); + mountRequest.addParameter("wait_for_completion", "true"); + mountRequest.addParameter("storage", "shared_cache"); + assertOK(client().performRequest(mountRequest)); + ensureGreen(mountedIndexName); + } + + void ensureSearchSuccess() throws IOException { + final Request searchRequest = new Request("GET", mountedIndexName + "/_search"); + searchRequest.addParameter("size", "10000"); + assertEquals( + "bar", + ObjectPath.createFromResponse(assertOK(client().performRequest(searchRequest))).evaluate("hits.hits.0._source.foo") + ); + } + + void ensureSearchFailure() throws IOException { + assertOK(client().performRequest(new Request("POST", "/_searchable_snapshots/cache/clear"))); + final Request searchRequest = new Request("GET", mountedIndexName + 
"/_search"); + searchRequest.addParameter("size", "10000"); + assertThat( + expectThrows(ResponseException.class, () -> client().performRequest(searchRequest)).getMessage(), + allOf( + containsString("Bad access key"), + containsString("Status Code: 403"), + containsString("Error Code: AccessDenied"), + containsString("failed to read data from cache") + ) + ); + } + } + +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 8bb4c45e54ab3..33982536cd634 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -543,9 +543,9 @@ public Map getRecoveryStateFactories() { return Map.of(SNAPSHOT_RECOVERY_STATE_FACTORY_KEY, SearchableSnapshotRecoveryState::new); } - public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = "searchable_snapshots_cache_fetch_async"; + public static final String CACHE_FETCH_ASYNC_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_FETCH_ASYNC_THREAD_NAME; public static final String CACHE_FETCH_ASYNC_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_fetch_async_thread_pool"; - public static final String CACHE_PREWARMING_THREAD_POOL_NAME = "searchable_snapshots_cache_prewarming"; + public static final String CACHE_PREWARMING_THREAD_POOL_NAME = BlobStoreRepository.SEARCHABLE_SNAPSHOTS_CACHE_PREWARMING_THREAD_NAME; public static final String CACHE_PREWARMING_THREAD_POOL_SETTING = "xpack.searchable_snapshots.cache_prewarming_thread_pool"; public static ScalingExecutorBuilder[] executorBuilders(Settings settings) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java new file mode 100644 index 0000000000000..335c8e311ace6 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/BlobContainerSupplier.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.store; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.FilterBlobContainer; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; + +import java.io.IOException; +import java.io.InputStream; +import java.util.function.Supplier; + +public class BlobContainerSupplier implements Supplier { + + private static final Logger logger = LogManager.getLogger(BlobContainerSupplier.class); + + private final Supplier repositorySupplier; + private final IndexId indexId; + private final int shardId; + + private volatile LastKnownState lastKnownState = new LastKnownState(null, null); + + public BlobContainerSupplier(Supplier repositorySupplier, IndexId indexId, int shardId) { + this.repositorySupplier = repositorySupplier; + this.indexId = indexId; + this.shardId = shardId; + } + + @Override + public BlobContainer get() { + final LastKnownState lastKnownState = this.lastKnownState; + final BlobStoreRepository currentRepository = repositorySupplier.get(); + + if (lastKnownState.blobStoreRepository() == currentRepository) { + return lastKnownState.blobContainer(); + } else { + return refreshAndGet(); + } + } + + private synchronized BlobContainer refreshAndGet() { + final BlobStoreRepository currentRepository = repositorySupplier.get(); + if (lastKnownState.blobStoreRepository() == currentRepository) { + return lastKnownState.blobContainer(); + } else { + logger.debug("creating new blob container [{}][{}][{}]", currentRepository.getMetadata().name(), indexId, shardId); + final BlobContainer newContainer = new RateLimitingBlobContainer( + currentRepository, + currentRepository.shardContainer(indexId, shardId) + ); + lastKnownState = new LastKnownState(currentRepository, newContainer); + return newContainer; + } + } + + private record LastKnownState(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) {} + + /** + * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which + * blobs are read from the repository. 
+ */ + private static class RateLimitingBlobContainer extends FilterBlobContainer { + + private final BlobStoreRepository blobStoreRepository; + + RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) { + super(blobContainer); + this.blobStoreRepository = blobStoreRepository; + } + + @Override + protected BlobContainer wrapChild(BlobContainer child) { + return new RateLimitingBlobContainer(blobStoreRepository, child); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName)); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length)); + } + } +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java new file mode 100644 index 0000000000000..63522ce2309a1 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/RepositorySupplier.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.searchablesnapshots.store; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; + +import java.util.Map; +import java.util.Objects; +import java.util.function.Supplier; + +public class RepositorySupplier implements Supplier { + + private static final Logger logger = LogManager.getLogger(BlobContainerSupplier.class); + + private final RepositoriesService repositoriesService; + + private final String repositoryName; + + @Nullable // if repository specified only by name + private final String repositoryUuid; + + private volatile String repositoryNameHint; + + public RepositorySupplier(RepositoriesService repositoriesService, String repositoryName, String repositoryUuid) { + this.repositoriesService = Objects.requireNonNull(repositoriesService); + this.repositoryName = Objects.requireNonNull(repositoryName); + this.repositoryUuid = repositoryUuid; + this.repositoryNameHint = repositoryName; + } + + @Override + public BlobStoreRepository get() { + return SearchableSnapshots.getSearchableRepository(getRepository()); + } + + private Repository getRepository() { + if (repositoryUuid == null) { + // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name + final Repository repository = repositoriesService.repository(repositoryName); + assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName; + return repository; + } + + final Map repositoriesByName = 
repositoriesService.getRepositories(); + + final String currentRepositoryNameHint = repositoryNameHint; + final Repository repositoryByLastKnownName = repositoriesByName.get(currentRepositoryNameHint); + if (repositoryByLastKnownName != null) { + final var foundRepositoryUuid = repositoryByLastKnownName.getMetadata().uuid(); + if (Objects.equals(repositoryUuid, foundRepositoryUuid)) { + return repositoryByLastKnownName; + } + } + + for (final Repository repository : repositoriesByName.values()) { + if (repository.getMetadata().uuid().equals(repositoryUuid)) { + final var newRepositoryName = repository.getMetadata().name(); + logger.debug( + "getRepository: repository [{}] with uuid [{}] replacing repository [{}]", + newRepositoryName, + repositoryUuid, + currentRepositoryNameHint + ); + repositoryNameHint = repository.getMetadata().name(); + return repository; + } + } + + throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + repositoryName + "]"); + } +} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java index b56cd28e9dc6c..bbdf371e1ed7b 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectory.java @@ -24,8 +24,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; -import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.settings.Settings; @@ -43,8 +41,6 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.repositories.Repository; -import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.threadpool.ThreadPool; @@ -62,7 +58,6 @@ import java.io.FileNotFoundException; import java.io.IOException; -import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; @@ -134,7 +129,6 @@ public class SearchableSnapshotDirectory extends BaseDirectory { // volatile fields are updated once under `this` lock, all together, iff loaded is not true. 
private volatile BlobStoreIndexShardSnapshot snapshot; - private volatile BlobContainer blobContainer; private volatile boolean loaded; private volatile SearchableSnapshotRecoveryState recoveryState; @@ -182,7 +176,6 @@ public SearchableSnapshotDirectory( private synchronized boolean invariant() { assert loaded != (snapshot == null); - assert loaded != (blobContainer == null); assert loaded != (recoveryState == null); return true; } @@ -212,7 +205,6 @@ public boolean loadSnapshot( synchronized (this) { alreadyLoaded = this.loaded; if (alreadyLoaded == false) { - this.blobContainer = blobContainerSupplier.get(); this.snapshot = snapshotSupplier.get(); this.loaded = true; cleanExistingRegularShardFiles(); @@ -226,14 +218,12 @@ public boolean loadSnapshot( return alreadyLoaded == false; } - @Nullable public BlobContainer blobContainer() { - final BlobContainer blobContainer = this.blobContainer; + final BlobContainer blobContainer = blobContainerSupplier.get(); assert blobContainer != null; return blobContainer; } - @Nullable public BlobStoreIndexShardSnapshot snapshot() { final BlobStoreIndexShardSnapshot snapshot = this.snapshot; assert snapshot != null; @@ -590,23 +580,15 @@ public static Directory create( ); } - Repository repository; - final String repositoryName; - if (SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings())) { - repository = repositoryByUuid( - repositories.getRepositories(), - SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings()), - SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()) - ); - repositoryName = repository.getMetadata().name(); - } else { - // repository containing pre-7.12 snapshots has no UUID so we assume it matches by name - repositoryName = SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()); - repository = repositories.repository(repositoryName); - assert repository.getMetadata().name().equals(repositoryName) : repository.getMetadata().name() + " vs " + repositoryName; - } + final Supplier repositorySupplier = new RepositorySupplier( + repositories, + SNAPSHOT_REPOSITORY_NAME_SETTING.get(indexSettings.getSettings()), + SNAPSHOT_REPOSITORY_UUID_SETTING.exists(indexSettings.getSettings()) + ? 
SNAPSHOT_REPOSITORY_UUID_SETTING.get(indexSettings.getSettings()) + : null + ); - final BlobStoreRepository blobStoreRepository = SearchableSnapshots.getSearchableRepository(repository); + final BlobStoreRepository initialRepository = repositorySupplier.get(); final IndexId indexId = new IndexId( SNAPSHOT_INDEX_NAME_SETTING.get(indexSettings.getSettings()), @@ -617,14 +599,14 @@ public static Directory create( SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings.getSettings()) ); - final LazyInitializable lazyBlobContainer = new LazyInitializable<>( - () -> new RateLimitingBlobContainer( - blobStoreRepository, - blobStoreRepository.shardContainer(indexId, shardPath.getShardId().id()) - ) + final Supplier blobContainerSupplier = new BlobContainerSupplier( + repositorySupplier, + indexId, + shardPath.getShardId().id() ); + final LazyInitializable lazySnapshot = new LazyInitializable<>( - () -> blobStoreRepository.loadShardSnapshot(lazyBlobContainer.getOrCompute(), snapshotId) + () -> repositorySupplier.get().loadShardSnapshot(blobContainerSupplier.get(), snapshotId) ); final Path cacheDir = CacheService.getShardCachePath(shardPath).resolve(snapshotId.getUUID()); @@ -632,10 +614,10 @@ public static Directory create( return new InMemoryNoOpCommitDirectory( new SearchableSnapshotDirectory( - lazyBlobContainer::getOrCompute, + blobContainerSupplier, lazySnapshot::getOrCompute, blobStoreCacheService, - repositoryName, + initialRepository.getMetadata().name(), snapshotId, indexId, shardPath.getShardId(), @@ -690,42 +672,4 @@ public void putCachedBlob(String name, ByteRange range, BytesReference content, public SharedBlobCacheService.CacheFile getFrozenCacheFile(String fileName, long length) { return sharedBlobCacheService.getCacheFile(createCacheKey(fileName), length); } - - private static Repository repositoryByUuid(Map repositories, String repositoryUuid, String originalName) { - for (Repository repository : repositories.values()) { - if (repository.getMetadata().uuid().equals(repositoryUuid)) { - return repository; - } - } - throw new RepositoryMissingException("uuid [" + repositoryUuid + "], original name [" + originalName + "]"); - } - - /** - * A {@link FilterBlobContainer} that uses {@link BlobStoreRepository#maybeRateLimitRestores(InputStream)} to limit the rate at which - * blobs are read from the repository. 
- */ - private static class RateLimitingBlobContainer extends FilterBlobContainer { - - private final BlobStoreRepository blobStoreRepository; - - RateLimitingBlobContainer(BlobStoreRepository blobStoreRepository, BlobContainer blobContainer) { - super(blobContainer); - this.blobStoreRepository = blobStoreRepository; - } - - @Override - protected BlobContainer wrapChild(BlobContainer child) { - return new RateLimitingBlobContainer(blobStoreRepository, child); - } - - @Override - public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName)); - } - - @Override - public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { - return blobStoreRepository.maybeRateLimitRestores(super.readBlob(purpose, blobName, position, length)); - } - } } From efc3ba9958e3bd11265d062115d80c9ca0b1f7bf Mon Sep 17 00:00:00 2001 From: Jason Tu Date: Mon, 18 Nov 2024 10:17:17 -0800 Subject: [PATCH 021/386] Update indexing-speed.asciidoc (#116559) --- docs/reference/how-to/indexing-speed.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 12de469c68449..d4cdb85e4d624 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -112,7 +112,7 @@ different nodes so there's redundancy for any node failures. You can also use insurance. [discrete] -==== Local vs.remote storage +==== Local vs. remote storage include::./remote-storage.asciidoc[] From 9bece7e2903610b4e565cdc54dba475d46996802 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Nov 2024 10:34:51 -0800 Subject: [PATCH 022/386] Upgrade the build's ASM to 9.7.1 (#116963) Primarily this allows forbidden apis to read java 23 class files. 
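The underlying constraint is that ASM rejects class files newer than the format
it was built for, so forbidden-apis (which parses bytecode through ASM) cannot
scan Java 23 output until the dependency is bumped. An illustrative standalone
snippet, not part of this change (class name and resource lookup are assumptions
for the demo), showing the failure mode the upgrade avoids:

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassVisitor;
    import org.objectweb.asm.Opcodes;

    public class ClassReaderDemo {
        public static void main(String[] args) throws Exception {
            // read this class's own bytecode from the classpath
            byte[] bytes = ClassReaderDemo.class
                .getResourceAsStream("ClassReaderDemo.class")
                .readAllBytes();
            // With an ASM release older than the bytecode format, the ClassReader
            // constructor throws IllegalArgumentException along the lines of
            // "Unsupported class file major version 67" (67 = Java 23).
            new ClassReader(bytes).accept(new ClassVisitor(Opcodes.ASM9) {
                // no-op visitor: parsing without error is the whole point
            }, 0);
            System.out.println("class file parsed successfully");
        }
    }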
--- gradle/build.versions.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index e3148c6f3ef2e..1bdd93e3a7470 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -1,5 +1,5 @@ [versions] -asm = "9.6" +asm = "9.7.1" jackson = "2.15.0" junit5 = "5.8.1" spock = "2.1-groovy-3.0" From 29659fdc814c772b44234610dbd365dfd458075b Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Mon, 18 Nov 2024 13:55:45 -0500 Subject: [PATCH 023/386] Update Semantic Query To Handle Zero Size Responses (#116277) --- docs/changelog/116277.yaml | 6 + .../xpack/inference/InferenceFeatures.java | 3 +- .../mapper/SemanticTextFieldMapper.java | 12 +- .../queries/SemanticQueryBuilder.java | 2 +- .../test/inference/40_semantic_text_query.yml | 114 ++++++++++++++++++ 5 files changed, 133 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/116277.yaml diff --git a/docs/changelog/116277.yaml b/docs/changelog/116277.yaml new file mode 100644 index 0000000000000..62262b7797783 --- /dev/null +++ b/docs/changelog/116277.yaml @@ -0,0 +1,6 @@ +pr: 116277 +summary: Update Semantic Query To Handle Zero Size Responses +area: Vector Search +type: bug +issues: + - 116083 diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index d9d1a87e714a3..f70e7f367127d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -38,7 +38,8 @@ public Set getTestFeatures() { return Set.of( SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 2a9fcfed49d2f..89a54ffe29177 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -69,6 +69,7 @@ import java.util.Set; import java.util.function.Function; +import static org.elasticsearch.search.SearchService.DEFAULT_SIZE; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; @@ -91,6 +92,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX = new NodeFeature("semantic_text.in_object_field_fix"); public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); public static final NodeFeature SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); + public static final NodeFeature SEMANTIC_TEXT_ZERO_SIZE_FIX = new NodeFeature("semantic_text.zero_size_fix"); public 
static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -507,7 +509,7 @@ public boolean fieldHasValue(FieldInfos fieldInfos) { return fieldInfos.fieldInfo(getEmbeddingsFieldName(name())) != null; } - public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost, String queryName) { + public QueryBuilder semanticQuery(InferenceResults inferenceResults, Integer requestSize, float boost, String queryName) { String nestedFieldPath = getChunksFieldName(name()); String inferenceResultsFieldName = getEmbeddingsFieldName(name()); QueryBuilder childQueryBuilder; @@ -551,7 +553,13 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost ); } - yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, null, null, null); + Integer k = requestSize; + if (k != null) { + // Ensure that k is at least the default size so that aggregations work when size is set to 0 in the request + k = Math.max(k, DEFAULT_SIZE); + } + + yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, k, null, null); } default -> throw new IllegalStateException( "Field [" diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index 478f2e6a21868..d648db2fbfdbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -170,7 +170,7 @@ private QueryBuilder doRewriteBuildSemanticQuery(SearchExecutionContext searchEx ); } - return semanticTextFieldType.semanticQuery(inferenceResults, boost(), queryName()); + return semanticTextFieldType.semanticQuery(inferenceResults, searchExecutionContext.requestSize(), boost(), queryName()); } else { throw new IllegalArgumentException( "Field [" + fieldName + "] of type [" + fieldType.typeName() + "] does not support " + NAME + " queries" diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index d28fce3be0d87..c2704a4c22914 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -878,3 +878,117 @@ setup: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc_1" } + +--- +"Query using a sparse embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-sparse-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: test-sparse-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: test-sparse-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-sparse-index-with-agg-id + body: + size: 0 + query: + 
semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } + +--- +"Query using a dense embedding model with size set to zero": + - requires: + cluster_features: "semantic_text.zero_size_fix" + reason: zero size fix added in 8.16.1 & 8.15.5 + + - do: + indices.create: + index: test-dense-index-with-agg-id + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + agg_id: + type: keyword + + - do: + index: + index: test-dense-index-with-agg-id + id: doc_1 + body: + inference_field: "inference test" + agg_id: "doc_1" + + - do: + index: + index: test-dense-index-with-agg-id + id: doc_2 + body: + non_inference_field: "non-inference test" + agg_id: "doc_2" + refresh: true + + - do: + search: + index: test-dense-index-with-agg-id + body: + size: 0 + query: + semantic: + field: "inference_field" + query: "inference test" + aggs: + agg_ids: + terms: + field: agg_id + + - match: { hits.total.value: 1 } + - length: { hits.hits: 0 } + - length: { aggregations.agg_ids.buckets: 1 } + - match: { aggregations.agg_ids.buckets.0.key: "doc_1" } + - match: { aggregations.agg_ids.buckets.0.doc_count: 1 } From 7003d7080c27556a524775c88029b7b9c014c253 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 19 Nov 2024 08:23:12 +1100 Subject: [PATCH 024/386] Mute org.elasticsearch.xpack.restart.QueryBuilderBWCIT testQueryBuilderBWC {p0=UPGRADED} #116989 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8696a1bddd3d4..231be53b78486 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -242,6 +242,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/116879 - class: org.elasticsearch.xpack.inference.InferenceRestIT issue: https://github.com/elastic/elasticsearch/issues/116899 +- class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT + method: testQueryBuilderBWC {p0=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116989 # Examples: # From fd0cdf09a438829e9947b2f07e663f599e2e02cc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 19 Nov 2024 08:23:33 +1100 Subject: [PATCH 025/386] Mute org.elasticsearch.upgrades.QueryBuilderBWCIT testQueryBuilderBWC {cluster=UPGRADED} #116990 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 231be53b78486..2b3c2a64d5ab5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -245,6 +245,9 @@ tests: - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 +- class: org.elasticsearch.upgrades.QueryBuilderBWCIT + method: testQueryBuilderBWC {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/116990 # Examples: # From 46fa424e5853974352a526fb03f0939adc913419 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 18 Nov 2024 22:40:24 +0000 Subject: [PATCH 026/386] [ML] Remove scale from 0 feature flag (#116936) Co-authored-by: Elastic Machine --- .../test/cluster/FeatureFlag.java | 3 +-- 
.../TransportInternalInferModelAction.java | 13 +++++------- .../ScaleFromZeroFeatureFlag.java | 20 ------------------- 3 files changed, 6 insertions(+), 30 deletions(-) delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 22449ca763d09..11787866af0d7 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,8 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index ba4483493da1d..e0405b1749536 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.InferenceWaitForAllocation; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.inference.adaptiveallocations.ScaleFromZeroFeatureFlag; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; @@ -277,13 +276,11 @@ private void inferAgainstAllocatedModel( boolean starting = adaptiveAllocationsScalerService.maybeStartAllocation(assignment); if (starting) { message += "; starting deployment of one allocation"; - - if (ScaleFromZeroFeatureFlag.isEnabled()) { - waitForAllocation.waitForAssignment( - new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) - ); - return; - } + logger.debug(message); + waitForAllocation.waitForAssignment( + new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) + ); + return; } logger.debug(message); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java deleted file mode 100644 index 4c446b65db9dd..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.inference.adaptiveallocations; - -import org.elasticsearch.common.util.FeatureFlag; - -public class ScaleFromZeroFeatureFlag { - private ScaleFromZeroFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("ml_scale_from_zero"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} From f7b6e20ce35699ad2479d5d7e58c3c01a386d673 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 18 Nov 2024 17:59:47 -0500 Subject: [PATCH 027/386] [Inference API] Add special case to inference API (#116962) * Add reranker special case to inference API * Update docs/changelog/116962.yaml * Update 116962.yaml * spotless * improvements from review * Fix typo --- docs/changelog/116962.yaml | 5 ++ .../InferenceNamedWriteablesProvider.java | 9 ++- .../BaseElasticsearchInternalService.java | 2 + .../elasticsearch/ElasticRerankerModel.java | 60 ++++++++++++++++++ .../ElasticRerankerServiceSettings.java | 62 +++++++++++++++++++ .../ElasticsearchInternalService.java | 29 +++++++++ 6 files changed, 166 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/116962.yaml create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml new file mode 100644 index 0000000000000..8f16b00e3f9fc --- /dev/null +++ b/docs/changelog/116962.yaml @@ -0,0 +1,5 @@ +pr: 116962 +summary: "Add special case for elastic reranker in inference API" +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 02bddb6076d69..2320cca8295d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandRerankTaskSettings; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserMlNodeTaskSettings; @@ -415,7 +416,13 @@ private static void addInternalNamedWriteables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index fc070965f29c2..f743b94df3810 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -156,6 +156,8 @@ protected void putModel(Model model, ActionListener listener) { putBuiltInModel(e5Model.getServiceSettings().modelId(), listener); } else if (model instanceof ElserInternalModel elserModel) { putBuiltInModel(elserModel.getServiceSettings().modelId(), listener); + } else if (model instanceof ElasticRerankerModel elasticRerankerModel) { + putBuiltInModel(elasticRerankerModel.getServiceSettings().modelId(), listener); } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java new file mode 100644 index 0000000000000..115cc9f05599a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.services.elasticsearch;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.ChunkingSettings;
+import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
+
+public class ElasticRerankerModel extends ElasticsearchInternalModel {
+
+    public ElasticRerankerModel(
+        String inferenceEntityId,
+        TaskType taskType,
+        String service,
+        ElasticRerankerServiceSettings serviceSettings,
+        ChunkingSettings chunkingSettings
+    ) {
+        super(inferenceEntityId, taskType, service, serviceSettings, chunkingSettings);
+    }
+
+    @Override
+    public ElasticRerankerServiceSettings getServiceSettings() {
+        return (ElasticRerankerServiceSettings) super.getServiceSettings();
+    }
+
+    @Override
+    public ActionListener<CreateTrainedModelAssignmentAction.Response> getCreateTrainedModelAssignmentActionListener(
+        Model model,
+        ActionListener<Boolean> listener
+    ) {
+
+        return new ActionListener<>() {
+            @Override
+            public void onResponse(CreateTrainedModelAssignmentAction.Response response) {
+                listener.onResponse(Boolean.TRUE);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
+                    listener.onFailure(
+                        new ResourceNotFoundException("Could not start the Elastic Reranker Endpoint due to [{}]", e, e.getMessage())
+                    );
+                    return;
+                }
+                listener.onFailure(e);
+            }
+        };
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java
new file mode 100644
index 0000000000000..316dc092e03c7
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.elasticsearch;
+
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings {
+
+    public static final String NAME = "elastic_reranker_service_settings";
+
+    public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) {
+        super(other);
+    }
+
+    public ElasticRerankerServiceSettings(
+        Integer numAllocations,
+        int numThreads,
+        String modelId,
+        AdaptiveAllocationsSettings adaptiveAllocationsSettings
+    ) {
+        super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings);
+    }
+
+    public ElasticRerankerServiceSettings(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    /**
+     * Parse the ElasticRerankerServiceSettings from a map and validate the setting values.
+     *
+     * If required settings are missing or the values are invalid, a
+     * {@link ValidationException} is thrown.
+     *
+     * @param map Source map containing the config
+     * @return The builder
+     */
+    public static Builder fromRequestMap(Map<String, Object> map) {
+        ValidationException validationException = new ValidationException();
+        var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException);
+
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        return baseSettings;
+    }
+
+    @Override
+    public String getWriteableName() {
+        return ElasticRerankerServiceSettings.NAME;
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
index fe83acc8574aa..718aeae979fe9 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
@@ -97,6 +97,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi
         MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86
     );
 
+    public static final String RERANKER_ID = ".rerank-v1";
+
     public static final int EMBEDDING_MAX_BATCH_SIZE = 10;
     public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch";
     public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch";
@@ -223,6 +225,8 @@ public void parseRequestConfig(
                         )
                     )
                 );
+            } else if (RERANKER_ID.equals(modelId)) {
+                rerankerCase(inferenceEntityId, taskType, config, serviceSettingsMap, chunkingSettings, modelListener);
             } else {
                 customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, chunkingSettings, modelListener);
             }
@@ -323,6 +327,31 @@ private static CustomElandInternalServiceSettings elandServiceSettings(
         };
     }
 
+    private void rerankerCase(
+        String inferenceEntityId,
+        TaskType taskType,
+        Map<String, Object> config,
+        Map<String, Object> serviceSettingsMap,
+        ChunkingSettings chunkingSettings,
+        ActionListener<Model> modelListener
+    ) {
+
+        var esServiceSettingsBuilder = ElasticsearchInternalServiceSettings.fromRequestMap(serviceSettingsMap);
+
+        throwIfNotEmptyMap(config, name());
+        throwIfNotEmptyMap(serviceSettingsMap, name());
+
+        modelListener.onResponse(
+            new ElasticRerankerModel(
+                inferenceEntityId,
+                taskType,
+                NAME,
+                new ElasticRerankerServiceSettings(esServiceSettingsBuilder.build()),
+                chunkingSettings
+            )
+        );
+    }
+
     private void e5Case(
         String inferenceEntityId,
         TaskType taskType,

From 25f13c9740768100a5542f20dbb93eec8e8fa766 Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Mon, 18 Nov 2024 15:04:56 -0800
Subject: [PATCH 028/386] Update BwcVersionsSpec to use current version numbers
 (#116991)

---
 .../gradle/internal/BwcVersionsSpec.groovy | 271 ++++++++----------
 1 file changed, 120 insertions(+), 151 deletions(-)

diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy
index c5b7a44a19d31..9c7d20d84a670 100644
--- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy
+++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy
@@ -17,232 +17,201 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo
 class BwcVersionsSpec extends Specification {
     List<String>
versionLines = [] - def "current version is next major with last minor staged"() { - given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.16.2', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.0.0') - - when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) - def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } - - then: - unreleased == [ - (v('7.16.2')): new UnreleasedVersionInfo(v('7.16.2'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') - ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.minimumWireCompatibleVersion == v('7.17.0') - } - def "current version is next minor with next major and last minor both staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] } def "current is next minor with upcoming minor staged"() { 
given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:staged'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] } def "current version is staged major"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), 
v('7.17.0'), v('7.17.1'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] } def "current version is major with unreleased next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } def "current version is major with staged next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.15.2')): new UnreleasedVersionInfo(v('7.15.2'), '7.15', ':distribution:bwc:bugfix'), - (v('7.16.0')): new UnreleasedVersionInfo(v('7.16.0'), '7.16', ':distribution:bwc:staged'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), + 
(v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] } def "current version is next bugfix"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.1')) + def bwc = new BwcVersions(versionLines, v('9.0.1')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] } def "current version is next minor with no staged releases"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, 
bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), '8.0', ':distribution:bwc:bugfix'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } private void addVersion(String elasticsearch, String lucene) { From db63a281616c004b3e2e8fcb2f9bcd19f91c0337 Mon Sep 17 00:00:00 2001 From: Kostiantyn Masliuk <1pkg@protonmail.com> Date: Mon, 18 Nov 2024 18:30:00 -0800 Subject: [PATCH 029/386] [Apm-data]: disable date_detection for all apm data streams (#116995) --- docs/changelog/116995.yaml | 5 +++++ .../src/main/resources/component-templates/apm@mappings.yaml | 1 + 2 files changed, 6 insertions(+) create mode 100644 docs/changelog/116995.yaml diff --git a/docs/changelog/116995.yaml b/docs/changelog/116995.yaml new file mode 100644 index 0000000000000..a0467c630edf3 --- /dev/null +++ b/docs/changelog/116995.yaml @@ -0,0 +1,5 @@ +pr: 116995 +summary: "Apm-data: disable date_detection for all apm data streams" +area: Data streams +type: enhancement +issues: [] \ No newline at end of file diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml index ac6462c86676c..a5a3a7433f4c1 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml @@ -4,6 +4,7 @@ _meta: managed: true template: mappings: + date_detection: false dynamic: true dynamic_templates: - numeric_labels: From 673b24fd5fec1c07e0a3b0ede73c1fb4d120ec24 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Tue, 19 Nov 2024 09:59:52 +0200 Subject: [PATCH 030/386] Fast refresh indices to use search shards (#116658) The changes of PR #115019 were reverted because it induced ES-8275. Now that the ticket is done, this PR re-introduces the reverted changes. Fast refresh indices should now behave like non fast refresh indices in how they execute (m)gets and searches. I.e., they should use the search shards. For BWC, we define a new transport version. We expect search shards to be upgraded first, before promotable shards. Until the cluster is fully upgraded, the promotable shards (whether upgraded or not) will still receive and execute gets/searches locally. 
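In sketch form, the routing gate described above (mirroring the canSearchShard change in OperationRouting in the diff below) is:

    // Fast-refresh indices keep routing gets/searches to the promotable (primary)
    // shard until every node in the cluster understands FAST_REFRESH_RCO_2;
    // afterwards they use search shards like every other index.
    if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) {
        return clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)
            ? shardRouting.isSearchable()
            : shardRouting.isPromotableToPrimary();
    }
    return shardRouting.isSearchable();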
Relates ES-9573 --- .../org/elasticsearch/TransportVersions.java | 1 + .../refresh/TransportShardRefreshAction.java | 32 +++++++----------- ...ansportUnpromotableShardRefreshAction.java | 15 +++++++++ .../action/get/TransportGetAction.java | 4 +-- .../get/TransportShardMultiGetAction.java | 4 +-- .../support/replication/PostWriteRefresh.java | 9 ++--- .../cluster/routing/OperationRouting.java | 9 ++++- .../index/cache/bitset/BitsetFilterCache.java | 7 +--- .../routing/IndexRoutingTableTests.java | 24 +++++++++----- .../cache/bitset/BitSetFilterCacheTests.java | 33 +++++-------------- 10 files changed, 64 insertions(+), 74 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index aadfffb562558..a1fb241861061 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -201,6 +201,7 @@ static TransportVersion def(int id) { public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 7857e9a22e9b9..cb667400240f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; @@ -120,27 +119,18 @@ public void onPrimaryOperationComplete( ActionListener listener ) { assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed"; - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get( - clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings() + UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), + replicaRequest.primaryRefreshResult.generation(), + false + ); + transportService.sendRequest( + transportService.getLocalNode(), + TransportUnpromotableShardRefreshAction.NAME, + unpromotableReplicaRequest, + new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) ); - - // Indices marked with fast refresh do not rely on refreshing the unpromotables - if (fastRefresh) { - listener.onResponse(null); - } else { - UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( - indexShardRoutingTable, - replicaRequest.primaryRefreshResult.primaryTerm(), - replicaRequest.primaryRefreshResult.generation(), - false - ); - transportService.sendRequest( - 
transportService.getLocalNode(), - TransportUnpromotableShardRefreshAction.NAME, - unpromotableReplicaRequest, - new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) - ); - } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 6c24ec2d17604..4458c008babcd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -24,6 +24,9 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; + public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, ActionResponse.Empty> { @@ -73,6 +76,18 @@ protected void unpromotableShardOperation( return; } + // During an upgrade to FAST_REFRESH_RCO_2, we expect search shards to be first upgraded before the primary is upgraded. Thus, + // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already. + // Note that the fast refresh setting is final. + // TODO: remove assertion (ES-9563) + assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false + || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO_2) + : "attempted to refresh a fast refresh search shard " + + shard + + " on transport version " + + transportService.getLocalNodeConnection().getTransportVersion() + + " (before FAST_REFRESH_RCO_2)"; + ActionListener.run(responseListener, listener -> { shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 9e535344c9589..fb4b3907d2bfd 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -126,12 +126,10 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportGetAction for an index with fast refresh"; handleGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting"; if (request.realtime()) { // we are not tied to a refresh cycle here anyway diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java 
b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 34b3ae50e0b51..633e7ef6793ab 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -124,12 +124,10 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportShardMultiGetAction for an index with fast refresh"; handleMultiGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has " + "the fast refresh setting"; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 683c3589c893d..7414aeeb2c405 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; @@ -53,9 +52,7 @@ public void refreshShard( case WAIT_UNTIL -> waitUntil(indexShard, location, new ActionListener<>() { @Override public void onResponse(Boolean forced) { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) { + if (location != null && indexShard.routingEntry().isSearchable() == false) { refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout); } else { listener.onResponse(forced); @@ -68,9 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index f7812d284f2af..13fc874f52e9f 
100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,6 +32,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -305,8 +306,14 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { + // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - return shardRouting.isPromotableToPrimary(); + // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. + if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)) { + return shardRouting.isSearchable(); + } else { + return shardRouting.isPromotableToPrimary(); + } } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 59607fadc0dd9..33a8487bb33a3 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -58,8 +58,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; - /** * This is a cache for {@link BitDocIdSet} based filters and is unbounded by size or time. *

@@ -105,10 +103,7 @@ static boolean shouldLoadRandomAccessFiltersEagerly(IndexSettings settings) { boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return loadFiltersEagerlySetting - && (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) - || (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) - && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()))); + return loadFiltersEagerlySetting && DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE); } else { return loadFiltersEagerlySetting; } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 21b30557cafea..e5786b1b3449e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,7 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -27,16 +29,22 @@ public class IndexRoutingTableTests extends ESTestCase { public void testReadyForSearch() { - innerReadyForSearch(false); - innerReadyForSearch(true); + innerReadyForSearch(false, false); + innerReadyForSearch(false, true); + innerReadyForSearch(true, false); + innerReadyForSearch(true, true); } - private void innerReadyForSearch(boolean fastRefresh) { + // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563) + private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) { Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID()); ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS); when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn( Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build() ); + when(clusterState.getMinTransportVersion()).thenReturn( + beforeFastRefreshRCO ? 
TransportVersion.fromId(FAST_REFRESH_RCO_2.id() - 1_00_0) : TransportVersion.current() + ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -55,7 +63,7 @@ private void innerReadyForSearch(boolean fastRefresh) { shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -91,7 +99,7 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -118,8 +126,6 @@ private void innerReadyForSearch(boolean fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available - // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure - // that readyForSearch allows for searching replicas when the index shard is not available. shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -137,8 +143,8 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change + if (fastRefresh && beforeFastRefreshRCO) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 77ab665166926..997cb123dbf8e 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -48,7 +48,6 @@ import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.node.DiscoveryNode.STATELESS_ENABLED_SETTING_NAME; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.elasticsearch.index.cache.bitset.BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -253,35 +252,21 @@ public void testShouldLoadRandomAccessFiltersEagerly() { for (var hasIndexRole : values) { for (var loadFiltersEagerly : values) { for (var isStateless : values) { - for (var fastRefresh : values) { - if (isStateless == false && fastRefresh) { - // fast refresh is only relevant for stateless indices - continue; - } - - boolean result = 
BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( - bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, fastRefresh) - ); - if (isStateless) { - assertEquals(loadFiltersEagerly && ((hasIndexRole && fastRefresh) || hasIndexRole == false), result); - } else { - assertEquals(loadFiltersEagerly, result); - } + boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( + bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly) + ); + if (isStateless) { + assertEquals(loadFiltersEagerly && hasIndexRole == false, result); + } else { + assertEquals(loadFiltersEagerly, result); } } } } } - private IndexSettings bitsetFilterCacheSettings( - boolean isStateless, - boolean hasIndexRole, - boolean loadFiltersEagerly, - boolean fastRefresh - ) { - var indexSettingsBuilder = Settings.builder() - .put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly) - .put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh); + private IndexSettings bitsetFilterCacheSettings(boolean isStateless, boolean hasIndexRole, boolean loadFiltersEagerly) { + var indexSettingsBuilder = Settings.builder().put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly); var nodeSettingsBuilder = Settings.builder() .putList( From 5363a38be9f7bbf0f9f196a889e629d1aac85641 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 19 Nov 2024 08:32:16 +0000 Subject: [PATCH 031/386] [ML] Protect against multiple concurrent downloads of the same model (#116869) Check for current downloading tasks in the download action. --- .../xpack/inference/DefaultEndPointsIT.java | 39 +++++++ .../inference/InferenceBaseRestTest.java | 7 +- .../elasticsearch/CustomElandModel.java | 36 +----- .../elasticsearch/ElasticDeployedModel.java | 5 + .../ElasticsearchInternalModel.java | 37 +++++- .../elasticsearch/ElserInternalModel.java | 32 ------ .../MultilingualE5SmallModel.java | 35 ------ .../action/DownloadTaskRemovedListener.java | 29 +++++ .../action/ModelDownloadTask.java | 23 +++- .../TransportLoadTrainedModelPackage.java | 108 +++++++++++++++--- ...TransportLoadTrainedModelPackageTests.java | 82 +++++++++---- ...portStartTrainedModelDeploymentAction.java | 4 +- 12 files changed, 295 insertions(+), 142 deletions(-) create mode 100644 x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 3db834bb579ff..69767ce0b24f0 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; @@ -16,9 +19,12 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; 
import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; @@ -108,4 +114,37 @@ private static void assertDefaultE5Config(Map modelConfig) { Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } + + public void testMultipleInferencesTriggeringDownloadAndDeploy() throws InterruptedException { + int numParallelRequests = 4; + var latch = new CountDownLatch(numParallelRequests); + var errors = new ArrayList(); + + var listener = new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + errors.add(exception); + latch.countDown(); + } + }; + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + for (int i = 0; i < numParallelRequests; i++) { + var request = createInferenceRequest( + Strings.format("_inference/%s", ElasticsearchInternalService.DEFAULT_ELSER_ID), + inputs, + queryParams + ); + client().performRequestAsync(request, listener); + } + + latch.await(); + assertThat(errors.toString(), errors, empty()); + } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 6790b9bb14c5a..4e32ef99d06dd 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -373,12 +373,17 @@ protected Map infer(String modelId, TaskType taskType, List inferInternal(String endpoint, List input, Map queryParameters) throws IOException { + protected Request createInferenceRequest(String endpoint, List input, Map queryParameters) { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input)); if (queryParameters.isEmpty() == false) { request.addParameters(queryParameters); } + return request; + } + + private Map inferInternal(String endpoint, List input, Map queryParameters) throws IOException { + var request = createInferenceRequest(endpoint, input, queryParameters); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index b710b24cbda31..b76de5eeedbfc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskSettings; 
import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class CustomElandModel extends ElasticsearchInternalModel { @@ -39,31 +34,10 @@ public CustomElandModel( } @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the inference as the custom eland model [{0}] for this platform cannot be found." - + " Custom models need to be loaded into the cluster with eland before they can be started.", - internalServiceSettings.modelId() - ) - ); - return; - } - listener.onFailure(e); - } - }; + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + + modelId + + "] as the model cannot be found." + + " Custom models need to be loaded into the cluster with Eland before they can be started."; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java index 724c7a8f0a166..ce6c6258d0393 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java @@ -36,6 +36,11 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA throw new IllegalStateException("cannot start model that uses an existing deployment"); } + @Override + protected String modelNotFoundErrorMessage(String modelId) { + throw new IllegalStateException("cannot start model [" + modelId + "] that uses an existing deployment"); + } + @Override public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 2405243f302bc..aa12bf0c645c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.TimeValue; @@ -15,8 +18,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; import 
org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; @@ -79,10 +84,38 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA return startRequest; } - public abstract ActionListener getCreateTrainedModelAssignmentActionListener( + public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, ActionListener listener - ); + ) { + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + var cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof ResourceNotFoundException) { + listener.onFailure(new ResourceNotFoundException(modelNotFoundErrorMessage(internalServiceSettings.modelId()))); + return; + } else if (cause instanceof ElasticsearchStatusException statusException) { + if (statusException.status() == RestStatus.CONFLICT + && statusException.getRootCause() instanceof ResourceAlreadyExistsException) { + // Deployment is already started + listener.onResponse(Boolean.TRUE); + } + return; + } + listener.onFailure(e); + } + }; + } + + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + modelId + "] as the model cannot be found."; + } public boolean usesExistingDeployment() { return internalServiceSettings.getDeploymentId() != null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java index 8d2f59171a601..2594f18db3fb5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class ElserInternalModel extends ElasticsearchInternalModel { @@ -37,31 +32,4 @@ public ElserInternalServiceSettings getServiceSettings() { public ElserMlNodeTaskSettings getTaskSettings() { return (ElserMlNodeTaskSettings) super.getTaskSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the ELSER service as the ELSER model for this platform cannot be found." 
- + " ELSER needs to be downloaded before it can be started." - ) - ); - return; - } - listener.onFailure(e); - } - }; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java index fee00d04d940b..2dcf91140c995 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class MultilingualE5SmallModel extends ElasticsearchInternalModel { @@ -31,34 +26,4 @@ public MultilingualE5SmallModel( public MultilingualE5SmallInternalServiceSettings getServiceSettings() { return (MultilingualE5SmallInternalServiceSettings) super.getServiceSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the TextEmbeddingService service as the " - + "Multilingual-E5-Small model for this platform cannot be found." - + " Multilingual-E5-Small needs to be downloaded before it can be started" - ) - ); - return; - } - listener.onFailure(e); - } - }; - } - } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java new file mode 100644 index 0000000000000..929dac6ee357a --- /dev/null +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.packageloader.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.tasks.RemovedTaskListener; +import org.elasticsearch.tasks.Task; + +public record DownloadTaskRemovedListener(ModelDownloadTask trackedTask, ActionListener listener) + implements + RemovedTaskListener { + + @Override + public void onRemoved(Task task) { + if (task.getId() == trackedTask.getId()) { + if (trackedTask.getTaskException() == null) { + listener.onResponse(AcknowledgedResponse.TRUE); + } else { + listener.onFailure(trackedTask.getTaskException()); + } + } + } +} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java index 59977bd418e11..dd09c3cf65fec 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java @@ -13,6 +13,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.MlTasks; import java.io.IOException; import java.util.Map; @@ -51,9 +52,12 @@ public void writeTo(StreamOutput out) throws IOException { } private final AtomicReference downloadProgress = new AtomicReference<>(new DownLoadProgress(0, 0)); + private final String modelId; + private volatile Exception taskException; - public ModelDownloadTask(long id, String type, String action, String description, TaskId parentTaskId, Map headers) { - super(id, type, action, description, parentTaskId, headers); + public ModelDownloadTask(long id, String type, String action, String modelId, TaskId parentTaskId, Map headers) { + super(id, type, action, taskDescription(modelId), parentTaskId, headers); + this.modelId = modelId; } void setProgress(int totalParts, int downloadedParts) { @@ -65,4 +69,19 @@ public DownloadStatus getStatus() { return new DownloadStatus(downloadProgress.get()); } + public String getModelId() { + return modelId; + } + + public void setTaskException(Exception exception) { + this.taskException = exception; + } + + public Exception getTaskException() { + return taskException; + } + + public static String taskDescription(String modelId) { + return MlTasks.downloadModelTaskDescription(modelId); + } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 76b7781b1cffe..2a14a8761e357 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -30,7 +30,6 @@ import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.common.notifications.Level;
@@ -42,6 +41,9 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
@@ -49,7 +51,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
 import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION;
 import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE;
-import static org.elasticsearch.xpack.core.ml.MlTasks.downloadModelTaskDescription;
 
 public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction<Request, AcknowledgedResponse> {
@@ -57,6 +58,7 @@ public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction<Request, AcknowledgedResponse> {
 
     private final Client client;
     private final CircuitBreakerService circuitBreakerService;
+    final Map<String, List<DownloadTaskRemovedListener>> taskRemovedListenersByModelId;
 
     @Inject
     public TransportLoadTrainedModelPackage(
@@ -81,6 +83,7 @@ public TransportLoadTrainedModelPackage(
         );
         this.client = new OriginSettingClient(client, ML_ORIGIN);
         this.circuitBreakerService = circuitBreakerService;
+        taskRemovedListenersByModelId = new HashMap<>();
     }
 
     @Override
@@ -91,6 +94,12 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state)
 
     @Override
     protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener) throws Exception {
+        if (handleDownloadInProgress(request.getModelId(), request.isWaitForCompletion(), listener)) {
+            logger.debug("Existing download of model [{}] in progress", request.getModelId());
+            // download in progress, nothing to do
+            return;
+        }
+
         ModelDownloadTask downloadTask = createDownloadTask(request);
 
         try {
@@ -107,7 +116,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener)
 
             var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop();
 
-            importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask);
+            importModel(client, () -> unregisterTask(downloadTask), request, modelImporter, downloadTask, downloadCompleteListener);
         } catch (Exception e) {
             taskManager.unregister(downloadTask);
             listener.onFailure(e);
@@ -124,22 +133,91 @@ private ParentTaskAssigningClient getParentTaskAssigningClient(Task originTask)
         return new ParentTaskAssigningClient(client, parentTaskId);
     }
 
+    /**
+     * Look for a current download task of the model and optionally wait
+     * for that task to complete if there is one.
+     * Synchronized with {@code unregisterTask} to prevent the task being
+     * removed before the remove listener is added.
+     * @param modelId Model being downloaded
+     * @param isWaitForCompletion Wait until the download completes before
+     *                            calling the listener
+     * @param listener Model download listener
+     * @return True if a download task is in progress
+     */
+    synchronized boolean handleDownloadInProgress(
+        String modelId,
+        boolean isWaitForCompletion,
+        ActionListener<AcknowledgedResponse> listener
+    ) {
+        var description = ModelDownloadTask.taskDescription(modelId);
+        var tasks = taskManager.getCancellableTasks().values();
+
+        ModelDownloadTask inProgress = null;
+        for (var task : tasks) {
+            if (description.equals(task.getDescription()) && task instanceof ModelDownloadTask downloadTask) {
+                inProgress = downloadTask;
+                break;
+            }
+        }
+
+        if (inProgress != null) {
+            if (isWaitForCompletion == false) {
+                // Not waiting for the download to complete, it is enough that the download is in progress
+                // Respond now, not when the download completes
+                listener.onResponse(AcknowledgedResponse.TRUE);
+                return true;
+            }
+            // Otherwise register a task removed listener which is called
+            // once the task is complete and unregistered
+            var tracker = new DownloadTaskRemovedListener(inProgress, listener);
+            taskRemovedListenersByModelId.computeIfAbsent(modelId, s -> new ArrayList<>()).add(tracker);
+            taskManager.registerRemovedTaskListener(tracker);
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Unregister the completed task, triggering any remove task listeners.
+     * This method is synchronized to prevent the task being removed while
+     * {@code handleDownloadInProgress} is in progress.
+     * @param task The completed task
+     */
+    synchronized void unregisterTask(ModelDownloadTask task) {
+        taskManager.unregister(task); // unregister will call the on remove function
+
+        var trackers = taskRemovedListenersByModelId.remove(task.getModelId());
+        if (trackers != null) {
+            for (var tracker : trackers) {
+                taskManager.unregisterRemovedTaskListener(tracker);
+            }
+        }
+    }
+
     /**
      * This is package scope so that we can test the logic directly.
-     * This should only be called from the masterOperation method and the tests
+     * This should only be called from the masterOperation method and the tests.
+     * This method is static for testing.
      *
      * @param auditClient a client which should only be used to send audit notifications. This client cannot be associated with the passed
      *                    in task, that way when the task is cancelled the notification requests can
      *                    still be performed. If it is associated with the task (i.e. via ParentTaskAssigningClient),
      *                    then the requests will throw a TaskCancelledException.
+     * @param unregisterTaskFn Runnable to unregister the task. Because this is a static function
+     *                         a lambda is used rather than the instance method.
+ * @param request The download request + * @param modelImporter The importer + * @param task Download task + * @param listener Listener */ static void importModel( Client auditClient, - TaskManager taskManager, + Runnable unregisterTaskFn, Request request, ModelImporter modelImporter, - ActionListener listener, - Task task + ModelDownloadTask task, + ActionListener listener ) { final String modelId = request.getModelId(); final long relativeStartNanos = System.nanoTime(); @@ -155,9 +233,12 @@ static void importModel( Level.INFO ); listener.onResponse(AcknowledgedResponse.TRUE); - }, exception -> listener.onFailure(processException(auditClient, modelId, exception))); + }, exception -> { + task.setTaskException(exception); + listener.onFailure(processException(auditClient, modelId, exception)); + }); - modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task))); + modelImporter.doImport(ActionListener.runAfter(finishListener, unregisterTaskFn)); } static Exception processException(Client auditClient, String modelId, Exception e) { @@ -197,14 +278,7 @@ public TaskId getParentTask() { @Override public ModelDownloadTask createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new ModelDownloadTask( - id, - type, - action, - downloadModelTaskDescription(request.getModelId()), - parentTaskId, - headers - ); + return new ModelDownloadTask(id, type, action, request.getModelId(), parentTaskId, headers); } }, false); } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index cbcfd5b760779..3486ce6af0db5 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -10,13 +10,19 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.ml.action.AuditMlNotificationAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; @@ -27,9 +33,13 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static 
org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; +import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -37,6 +47,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportLoadTrainedModelPackageTests extends ESTestCase { private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]"; @@ -44,17 +55,10 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase { public void testSendsFinishedUploadNotification() { var uploader = createUploader(null); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); var client = mock(Client.class); - TransportLoadTrainedModelPackage.importModel( - client, - taskManager, - createRequestWithWaiting(), - uploader, - ActionListener.noop(), - task - ); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, ActionListener.noop()); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the start and finish messages @@ -108,32 +112,63 @@ public void testSendsWarningNotificationForTaskCancelledException() throws Excep public void testCallsOnResponseWithAcknowledgedResponse() throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(null); var responseRef = new AtomicReference(); var listener = ActionListener.wrap(responseRef::set, e -> fail("received an exception: " + e.getMessage())); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, listener); assertThat(responseRef.get(), is(AcknowledgedResponse.TRUE)); } public void testDoesNotCallListenerWhenNotWaitingForCompletion() { var uploader = mock(ModelImporter.class); var client = mock(Client.class); - var taskManager = mock(TaskManager.class); - var task = mock(Task.class); - + var task = mock(ModelDownloadTask.class); TransportLoadTrainedModelPackage.importModel( client, - taskManager, + () -> {}, createRequestWithoutWaiting(), uploader, - ActionListener.running(ESTestCase::fail), - task + task, + ActionListener.running(ESTestCase::fail) ); } + public void testWaitForExistingDownload() { + var taskManager = mock(TaskManager.class); + var modelId = "foo"; + var task = new ModelDownloadTask(1L, MODEL_IMPORT_TASK_TYPE, MODEL_IMPORT_TASK_ACTION, modelId, new TaskId("node", 1L), Map.of()); + when(taskManager.getCancellableTasks()).thenReturn(Map.of(1L, task)); + + var transportService = mock(TransportService.class); + when(transportService.getTaskManager()).thenReturn(taskManager); + + var action = new TransportLoadTrainedModelPackage( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + mock(Client.class), + mock(CircuitBreakerService.class) + ); + + assertTrue(action.handleDownloadInProgress(modelId, true, 
ActionListener.noop())); + verify(taskManager).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + // With wait for completion == false no new removed listener will be added + assertTrue(action.handleDownloadInProgress(modelId, false, ActionListener.noop())); + verify(taskManager, times(1)).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + assertFalse(action.handleDownloadInProgress("no-task-for-this-one", randomBoolean(), ActionListener.noop())); + } + private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception { var esStatusException = new ElasticsearchStatusException(message, status, exception); @@ -152,7 +187,7 @@ private void assertNotificationAndOnFailure( ) throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(thrownException); var failureRef = new AtomicReference(); @@ -160,7 +195,14 @@ private void assertNotificationAndOnFailure( (AcknowledgedResponse response) -> { fail("received a acknowledged response: " + response.toString()); }, failureRef::set ); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel( + client, + () -> taskManager.unregister(task), + createRequestWithWaiting(), + uploader, + task, + listener + ); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the starting message and the failure diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 5fd70ce71cd24..f01372ca4f246 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -190,11 +190,11 @@ protected void masterOperation( () -> "[" + request.getDeploymentId() + "] creating new assignment for model [" + request.getModelId() + "] failed", e ); - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException resourceAlreadyExistsException) { e = new ElasticsearchStatusException( "Cannot start deployment [{}] because it has already been started", RestStatus.CONFLICT, - e, + resourceAlreadyExistsException, request.getDeploymentId() ); } From abc7f58ad61ecf2d41dad5ec018011010da15fad Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 19 Nov 2024 09:49:35 +0100 Subject: [PATCH 032/386] Add license check for ES|QL functions (#116715) --- .../core/expression/function/Function.java | 6 + .../xpack/esql/EsqlTestUtils.java | 3 +- .../xpack/esql/analysis/Verifier.java | 17 ++- .../xpack/esql/execution/PlanExecutor.java | 5 +- .../xpack/esql/plugin/EsqlPlugin.java | 9 +- .../function/CheckLicenseTests.java | 138 ++++++++++++++++++ .../LocalPhysicalPlanOptimizerTests.java | 3 +- 
.../esql/planner/QueryTranslatorTests.java | 3 +- .../esql/stats/PlanExecutorMetricsTests.java | 3 +- .../esql/stats/VerifierMetricsTests.java | 7 +- 10 files changed, 183 insertions(+), 11 deletions(-) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java index cad5c631088f2..a1afcdbf1f77c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.function; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -42,6 +43,11 @@ public Nullability nullable() { return Expressions.nullable(children()); } + /** Return true if this function can be executed under the provided {@link XPackLicenseState}, otherwise false.*/ + public boolean checkLicense(XPackLicenseState state) { + return true; + } + @Override public int hashCode() { return Objects.hash(getClass(), children()); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 2913401d8aab3..d6715a932c075 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -342,7 +343,7 @@ public String toString() { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d399c826e0bf2..0641a03c88b69 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; @@ -82,9 +83,11 @@ public class Verifier { private final Metrics metrics; + private final XPackLicenseState 
licenseState; - public Verifier(Metrics metrics) { + public Verifier(Metrics metrics, XPackLicenseState licenseState) { this.metrics = metrics; + this.licenseState = licenseState; } /** @@ -201,6 +204,10 @@ else if (p instanceof Lookup lookup) { }); checkRemoteEnrich(plan, failures); + if (failures.isEmpty()) { + checkLicense(plan, licenseState, failures); + } + // gather metrics if (failures.isEmpty()) { gatherMetrics(plan, partialMetrics); @@ -546,6 +553,14 @@ private static void checkBinaryComparison(LogicalPlan p, Set failures) }); } + private void checkLicense(LogicalPlan plan, XPackLicenseState licenseState, Set failures) { + plan.forEachExpressionDown(Function.class, p -> { + if (p.checkLicense(licenseState) == false) { + failures.add(new Failure(p, "current license is non-compliant for function [" + p.sourceText() + "]")); + } + }); + } + private void gatherMetrics(LogicalPlan plan, BitSet b) { plan.forEachDown(p -> FeatureMetric.set(p, b)); for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 816388193c5f6..c1269009c6a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; @@ -40,13 +41,13 @@ public class PlanExecutor { private final Verifier verifier; private final PlanningMetricsManager planningMetricsManager; - public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { + public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); - this.verifier = new Verifier(metrics); + this.verifier = new Verifier(metrics, licenseState); this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index e9b9f571e880e..b091ab0c1bafc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -38,6 +38,7 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; import org.elasticsearch.compute.operator.topn.TopNOperatorStatus; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; @@ -45,6 +46,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackPlugin; 
import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.esql.EsqlInfoTransportAction; @@ -116,7 +118,7 @@ public Collection createComponents(PluginServices services) { BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize); setupSharedSecrets(); return List.of( - new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry()), + new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry(), getLicenseState()), new ExchangeService(services.clusterService().getSettings(), services.threadPool(), ThreadPool.Names.SEARCH, blockFactory), blockFactory ); @@ -131,6 +133,11 @@ private void setupSharedSecrets() { } } + // to be overriden by tests + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } + /** * The settings defined by the ESQL plugin. * diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java new file mode 100644 index 0000000000000..98f36d339976c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.TestUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; +import org.elasticsearch.xpack.esql.analysis.Verifier; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.stats.Metrics; + +import java.util.List; + +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzerDefaultMapping; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultEnrichResolution; +import static org.hamcrest.Matchers.containsString; + +public class CheckLicenseTests extends ESTestCase { + + private final EsqlParser parser = new EsqlParser(); + private final String esql = "from tests | eval license() | LIMIT 10"; + + public void testLicense() { + for (License.OperationMode functionLicense : License.OperationMode.values()) { + final LicensedFeature functionLicenseFeature = random().nextBoolean() + ? 
LicensedFeature.momentary("test", "license", functionLicense)
+                : LicensedFeature.persistent("test", "license", functionLicense);
+            final EsqlFunctionRegistry.FunctionBuilder builder = (source, expression, cfg) -> {
+                final LicensedFunction licensedFunction = new LicensedFunction(source);
+                licensedFunction.setLicensedFeature(functionLicenseFeature);
+                return licensedFunction;
+            };
+            for (License.OperationMode operationMode : License.OperationMode.values()) {
+                if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, functionLicense) < 0) {
+                    // non-compliant license
+                    final VerificationException ex = expectThrows(VerificationException.class, () -> analyze(builder, operationMode));
+                    assertThat(ex.getMessage(), containsString("current license is non-compliant for function [license()]"));
+                } else {
+                    // compliant license
+                    assertNotNull(analyze(builder, operationMode));
+                }
+            }
+        }
+    }
+
+    private LogicalPlan analyze(EsqlFunctionRegistry.FunctionBuilder builder, License.OperationMode operationMode) {
+        final FunctionDefinition def = EsqlFunctionRegistry.def(LicensedFunction.class, builder, "license");
+        final EsqlFunctionRegistry registry = new EsqlFunctionRegistry(def) {
+            @Override
+            public EsqlFunctionRegistry snapshotRegistry() {
+                return this;
+            }
+        };
+        return analyzer(registry, operationMode).analyze(parser.createStatement(esql));
+    }
+
+    private static Analyzer analyzer(EsqlFunctionRegistry registry, License.OperationMode operationMode) {
+        return new Analyzer(
+            new AnalyzerContext(EsqlTestUtils.TEST_CFG, registry, analyzerDefaultMapping(), defaultEnrichResolution()),
+            new Verifier(new Metrics(new EsqlFunctionRegistry()), getLicenseState(operationMode))
+        );
+    }
+
+    private static XPackLicenseState getLicenseState(License.OperationMode operationMode) {
+        final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState();
+        licenseState.update(new XPackLicenseStatus(operationMode, true, null));
+        return licenseState;
+    }
+
+    // It needs to be public because we run validation on it via reflection in org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests.
+    // This test also prevents adding the license as a constructor parameter.
+ public static class LicensedFunction extends Function { + + private LicensedFeature licensedFeature; + + public LicensedFunction(Source source) { + super(source, List.of()); + } + + void setLicensedFeature(LicensedFeature licensedFeature) { + this.licensedFeature = licensedFeature; + } + + @Override + public boolean checkLicense(XPackLicenseState state) { + if (licensedFeature instanceof LicensedFeature.Momentary momentary) { + return momentary.check(state); + } else { + return licensedFeature.checkWithoutTracking(state); + } + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + + @Override + public Expression replaceChildren(List newChildren) { + throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + + @Override + public void writeTo(StreamOutput out) { + throw new UnsupportedOperationException(); + } + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 073a51ee69114..2134e16b00131 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; @@ -145,7 +146,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index cf90cf96fe683..57210fda07f2b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -46,7 +47,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 116df21a33ac0..b323efad2b4c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.IndexMode; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -102,7 +103,7 @@ public void testFailedMetric() { return null; }).when(esqlClient).execute(eq(EsqlResolveFieldsAction.TYPE), any(), any()); - var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP); + var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP, new XPackLicenseState(() -> 0L)); var enrichResolver = mockEnrichResolver(); var request = new EsqlQueryRequest(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 5e6588d2295f9..eda906b147956 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.stats; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; @@ -205,7 +206,7 @@ public void testTwoWhereQuery() { public void testTwoQueriesExecuted() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -252,7 +253,7 @@ public void testTwoQueriesExecuted() { public void testMultipleFunctions() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -526,7 +527,7 @@ private Counters esql(String esql, Verifier v) { Metrics metrics = null; if (v == null) { metrics = new Metrics(new EsqlFunctionRegistry()); - verifier = new Verifier(metrics); + verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); } analyzer(verifier).analyze(parser.createStatement(esql)); From 25002fc11c543fca3382a8a5aaa731f9073f9f43 Mon Sep 17 00:00:00 2001 From: Pete Gillin Date: Tue, 19 Nov 2024 09:26:32 +0000 Subject: [PATCH 033/386] Use `assertThrows` in `ConfigurationUtilsTests` (#116971) This was trying to assert that the code under test threw an exception using the 'try-act-fail-catch-assert' pattern, only the 'fail' step was missing, meaning that the tests would have incorrectly passed if the method didn't throw. This switches it to using `assertThrows`, which is less easy to get wrong. 
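For illustration, a minimal self-contained sketch of the two patterns; the parseConfig() method is hypothetical, standing in for the ConfigurationUtils calls under test:

    import org.elasticsearch.ElasticsearchParseException;

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.equalTo;
    import static org.junit.Assert.assertThrows;

    class AssertThrowsSketch {

        // Hypothetical stand-in for the ConfigurationUtils method under test.
        static void parseConfig() {
            throw new ElasticsearchParseException("[arr] property isn't a string");
        }

        static void fragilePattern() {
            // Old pattern: 'try-act-fail-catch-assert' with the 'fail' step missing,
            // so this passes silently if parseConfig() stops throwing.
            try {
                parseConfig();
                // fail("expected an ElasticsearchParseException"); <- the omitted step
            } catch (ElasticsearchParseException e) {
                assertThat(e.getMessage(), equalTo("[arr] property isn't a string"));
            }
        }

        static void robustPattern() {
            // New pattern: assertThrows fails the test if nothing is thrown and
            // returns the exception so further assertions can run on it.
            ElasticsearchParseException caught = assertThrows(ElasticsearchParseException.class, AssertThrowsSketch::parseConfig);
            assertThat(caught.getMessage(), equalTo("[arr] property isn't a string"));
        }
    }

The second form both fails when no exception is thrown and hands back the caught exception for further assertions.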
--- .../ingest/ConfigurationUtilsTests.java | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index dabc8672733e2..0e8c7e0857251 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -65,11 +65,12 @@ public void testReadStringProperty() { } public void testReadStringPropertyInvalidType() { - try { - ConfigurationUtils.readStringProperty(null, null, config, "arr"); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringProperty(null, null, config, "arr") + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); + } public void testReadBooleanProperty() { @@ -83,11 +84,11 @@ public void testReadNullBooleanProperty() { } public void testReadBooleanPropertyInvalidType() { - try { - ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]")); } public void testReadStringOrIntProperty() { @@ -98,11 +99,11 @@ public void testReadStringOrIntProperty() { } public void testReadStringOrIntPropertyInvalidType() { - try { - ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); } public void testReadMediaProperty() { From b5addca40a0b11d3294a6154e56dba5b2e588303 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 19 Nov 2024 10:37:47 +0100 Subject: [PATCH 034/386] ESQL: Docs: COUNT: add an explanation to the use of the 3VL (#116684) Add an explanation of why `... OR NULL` is needed with `COUNT(...)`. 
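For illustration, a hypothetical query over the employees test data (the result names are made up for this sketch):

    FROM employees
    | STATS polyglots = COUNT(languages >= 3 OR NULL), monoglots = COUNT(languages == 1 OR NULL)

`languages >= 3 OR NULL` evaluates to `TRUE` when the predicate holds and to `NULL` otherwise (`FALSE OR NULL` is `NULL`), and COUNT ignores `NULL`s, so each COUNT tallies only its own predicate while both run over the same rows.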
Fixes: #99954 --- docs/reference/esql/functions/examples/count.asciidoc | 2 +- .../xpack/esql/expression/function/aggregate/Count.java | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/reference/esql/functions/examples/count.asciidoc b/docs/reference/esql/functions/examples/count.asciidoc index fb696b51e054c..33ed054d3d1e2 100644 --- a/docs/reference/esql/functions/examples/count.asciidoc +++ b/docs/reference/esql/functions/examples/count.asciidoc @@ -37,7 +37,7 @@ include::{esql-specs}/stats.csv-spec[tag=count-where] |=== include::{esql-specs}/stats.csv-spec[tag=count-where-result] |=== -To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)` +To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)`. This builds on the three-valued logic ({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0. [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=count-or-null] ---- diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index fa8a9e7d8c837..3a0d616d407a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -58,7 +58,9 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE ), @Example( description = "To count the same stream of data based on two different expressions " - + "use the pattern `COUNT(<expression> OR NULL)`", + + "use the pattern `COUNT(<expression> OR NULL)`. This builds on the three-valued logic " + + "({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, " + + "plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0.", file = "stats", tag = "count-or-null" ) } From 7ba63f26f0c54845e343bbb6137347ab52cc064b Mon Sep 17 00:00:00 2001 From: Dai Sugimori Date: Tue, 19 Nov 2024 19:52:39 +0900 Subject: [PATCH 035/386] [ML] Support any tokenizers for text_expansion task (#116935) * Support any tokenizers for text_expansion task * Remove unused imports --------- Co-authored-by: Elastic Machine --- .../inference/trainedmodel/TextExpansionConfig.java | 6 ------ .../trainedmodel/TextExpansionConfigTests.java | 11 ----------- 2 files changed, 17 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java index f4ac89124cddb..68e0f7e1ac885 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java @@ -76,12 +76,6 @@ public TextExpansionConfig( this.vocabularyConfig = Optional.ofNullable(vocabularyConfig) .orElse(new VocabularyConfig(InferenceIndexConstants.nativeDefinitionStore())); this.tokenization = tokenization == null ?
Tokenization.createDefault() : tokenization; - if (this.tokenization instanceof BertTokenization == false) { - throw ExceptionsHelper.badRequestException( - "text expansion models must be configured with BERT tokenizer, [{}] given", - this.tokenization.getName() - ); - } this.resultsField = resultsField; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java index cf4630899ab53..a91cceec8a167 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.InferenceConfigItemTestCase; @@ -67,13 +65,4 @@ protected TextExpansionConfig doParseInstance(XContentParser parser) throws IOEx protected TextExpansionConfig mutateInstanceForVersion(TextExpansionConfig instance, TransportVersion version) { return instance; } - - public void testBertTokenizationOnly() { - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> new TextExpansionConfig(null, RobertaTokenizationTests.createRandom(), null) - ); - assertEquals(RestStatus.BAD_REQUEST, e.status()); - assertEquals("text expansion models must be configured with BERT tokenizer, [roberta] given", e.getMessage()); - } } From 9296fb40ff2f30a6f53428ae2cfe09446dffe38f Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 19 Nov 2024 12:26:37 +0100 Subject: [PATCH 036/386] Use LongArray instead of long[] for owning ordinals when building Internal aggregations (#116874) This commit changes the signature of Aggregator#buildAggregations(long[]) to Aggregator#buildAggregations(LongArray) to avoid allocations of humongous arrays.
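The recurring rewrite, as a minimal sketch (BigArrays, LongArray, and the try-with-resources release are taken from the diff itself; `totalOrdsToCollect` and `computeOrd()` are illustrative stand-ins):

    // Before: one potentially humongous heap allocation, invisible to the
    // circuit breakers.
    long[] ords = new long[totalOrdsToCollect];

    // After: a paged, releasable array obtained from BigArrays; reads and
    // writes go through get(i) and set(i, value) instead of ords[i].
    try (LongArray ords = bigArrays.newLongArray(totalOrdsToCollect)) {
        for (long i = 0; i < ords.size(); i++) {
            ords.set(i, computeOrd(i));
        }
        // ... consume ords; it is released when the block exits ...
    }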
--- .../adjacency/AdjacencyMatrixAggregator.java | 84 ++++---- .../AutoDateHistogramAggregator.java | 6 +- .../timeseries/TimeSeriesAggregator.java | 68 ++++--- .../ChildrenToParentAggregator.java | 3 +- .../aggregations/ParentJoinAggregator.java | 10 +- .../ParentToChildrenAggregator.java | 3 +- .../action/search/TransportSearchIT.java | 3 +- .../aggregations/AdaptingAggregator.java | 7 +- .../search/aggregations/Aggregator.java | 8 +- .../aggregations/NonCollectingAggregator.java | 7 +- .../bucket/BestBucketsDeferringCollector.java | 26 +-- .../bucket/BucketsAggregator.java | 191 ++++++++++-------- .../bucket/DeferableBucketAggregator.java | 7 +- .../bucket/DeferringBucketCollector.java | 8 +- .../bucket/composite/CompositeAggregator.java | 75 +++---- .../countedterms/CountedTermsAggregator.java | 126 ++++++------ .../bucket/filter/FiltersAggregator.java | 3 +- .../bucket/geogrid/GeoGridAggregator.java | 59 +++--- .../bucket/global/GlobalAggregator.java | 5 +- .../AbstractHistogramAggregator.java | 3 +- .../histogram/DateHistogramAggregator.java | 3 +- .../DateRangeHistogramAggregator.java | 3 +- .../VariableWidthHistogramAggregator.java | 42 ++-- .../bucket/missing/MissingAggregator.java | 3 +- .../bucket/nested/NestedAggregator.java | 3 +- .../nested/ReverseNestedAggregator.java | 3 +- .../bucket/prefix/IpPrefixAggregator.java | 98 ++++----- .../bucket/range/BinaryRangeAggregator.java | 3 +- .../bucket/range/RangeAggregator.java | 3 +- .../sampler/BestDocsDeferringCollector.java | 3 +- .../bucket/sampler/SamplerAggregator.java | 3 +- .../random/RandomSamplerAggregator.java | 3 +- .../GlobalOrdinalsStringTermsAggregator.java | 110 +++++----- .../terms/InternalSignificantTerms.java | 2 +- .../bucket/terms/LongRareTermsAggregator.java | 115 ++++++----- .../terms/MapStringTermsAggregator.java | 89 ++++---- .../bucket/terms/NumericTermsAggregator.java | 104 +++++----- .../terms/StringRareTermsAggregator.java | 124 +++++++----- .../metrics/MetricsAggregator.java | 9 +- .../aggregation/ProfilingAggregator.java | 3 +- .../aggregations/AdaptingAggregatorTests.java | 3 +- .../aggregations/AggregatorBaseTests.java | 3 +- .../BestBucketsDeferringCollectorTests.java | 20 +- .../bucket/BucketsAggregatorTests.java | 3 +- .../BestDocsDeferringCollectorTests.java | 3 +- .../multiterms/MultiTermsAggregator.java | 97 ++++----- .../CategorizeTextAggregator.java | 48 ++--- .../mr/DelegatingCircuitBreakerService.java | 6 +- .../mr/ItemSetMapReduceAggregator.java | 7 +- 49 files changed, 877 insertions(+), 741 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 2b4fea0327e86..29e8aec00a02d 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -177,65 +178,66 @@ public void collect(int doc, long bucket) throws 
IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { // Buckets are ordered into groups - [keyed filters] [key1&key2 intersects] - int maxOrd = owningBucketOrds.length * totalNumKeys; - int totalBucketsToBuild = 0; - for (int ord = 0; ord < maxOrd; ord++) { + long maxOrd = owningBucketOrds.size() * totalNumKeys; + long totalBucketsToBuild = 0; + for (long ord = 0; ord < maxOrd; ord++) { if (bucketDocCount(ord) > 0) { totalBucketsToBuild++; } } - long[] bucketOrdsToBuild = new long[totalBucketsToBuild]; - int builtBucketIndex = 0; - for (int ord = 0; ord < maxOrd; ord++) { - if (bucketDocCount(ord) > 0) { - bucketOrdsToBuild[builtBucketIndex++] = ord; - } - } - assert builtBucketIndex == totalBucketsToBuild; - builtBucketIndex = 0; - var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < owningBucketOrds.length; owningBucketOrdIdx++) { - List buckets = new ArrayList<>(filters.length); - for (int i = 0; i < keys.length; i++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], i); - long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned because this aggregation will commonly be used under a - // a date-histogram where we will look for transactions over time and can expect many - // empty buckets. - if (docCount > 0) { - InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - keys[i], - docCount, - bucketSubAggs.apply(builtBucketIndex++) - ); - buckets.add(bucket); + try (LongArray bucketOrdsToBuild = bigArrays().newLongArray(totalBucketsToBuild)) { + int builtBucketIndex = 0; + for (int ord = 0; ord < maxOrd; ord++) { + if (bucketDocCount(ord) > 0) { + bucketOrdsToBuild.set(builtBucketIndex++, ord); } } - int pos = keys.length; - for (int i = 0; i < keys.length; i++) { - for (int j = i + 1; j < keys.length; j++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], pos); + assert builtBucketIndex == totalBucketsToBuild; + builtBucketIndex = 0; + var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < results.length; owningBucketOrdIdx++) { + List buckets = new ArrayList<>(filters.length); + for (int i = 0; i < keys.length; i++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), i); long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned due to potential for very sparse matrices + // Empty buckets are not returned because this aggregation will commonly be used under a + // a date-histogram where we will look for transactions over time and can expect many + // empty buckets. 
if (docCount > 0) { - String intersectKey = keys[i] + separator + keys[j]; InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - intersectKey, + keys[i], docCount, bucketSubAggs.apply(builtBucketIndex++) ); buckets.add(bucket); } - pos++; } + int pos = keys.length; + for (int i = 0; i < keys.length; i++) { + for (int j = i + 1; j < keys.length; j++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), pos); + long docCount = bucketDocCount(bucketOrd); + // Empty buckets are not returned due to potential for very sparse matrices + if (docCount > 0) { + String intersectKey = keys[i] + separator + keys[j]; + InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( + intersectKey, + docCount, + bucketSubAggs.apply(builtBucketIndex++) + ); + buckets.add(bucket); + } + pos++; + } + } + results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); } - results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); + assert builtBucketIndex == totalBucketsToBuild; + return results; } - assert builtBucketIndex == totalBucketsToBuild; - return results; } @Override diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index d4e1c2928c441..6add1b0ac4a13 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -141,7 +141,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag protected final InternalAggregation[] buildAggregations( LongKeyedBucketOrds bucketOrds, LongToIntFunction roundingIndexFor, - long[] owningBucketOrds + LongArray owningBucketOrds ) throws IOException { return buildAggregationsForVariableBuckets( owningBucketOrds, @@ -324,7 +324,7 @@ private void increaseRoundingIfNeeded(long rounded) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregations(bucketOrds, l -> roundingIdx, owningBucketOrds); } @@ -594,7 +594,7 @@ private void rebucket() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Rebucket before building the aggregation to build as small as result * as possible. 
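The TimeSeriesAggregator hunk below applies the same idea to the per-ordinal bucket arrays, replacing a fixed InternalBucket[][] with a paged ObjectArray. A sketch of the shape, where buildBucketsFor() is a hypothetical stand-in for the per-ordinal collection loop in the diff:

    try (ObjectArray<InternalTimeSeries.InternalBucket[]> perOrd = bigArrays().newObjectArray(owningBucketOrds.size())) {
        for (long ordIdx = 0; ordIdx < perOrd.size(); ordIdx++) {
            // one bucket array per owning bucket ordinal
            perOrd.set(ordIdx, buildBucketsFor(owningBucketOrds.get(ordIdx)));
        }
        // attach sub-aggregation results to every bucket, as the diff does
        buildSubAggsForAllBuckets(perOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
    }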
diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index c74637330dd7a..1263d4282a18a 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -11,6 +11,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.RoutingPathFields; @@ -30,6 +32,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -67,42 +70,43 @@ public TimeSeriesAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { BytesRef spare = new BytesRef(); - InternalTimeSeries.InternalBucket[][] allBucketsPerOrd = new InternalTimeSeries.InternalBucket[owningBucketOrds.length][]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - List buckets = new ArrayList<>(); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - ordsEnum.readValue(spare); - InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( - BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. - docCount, - null, - keyed - ); - bucket.bucketOrd = ordsEnum.ord(); - buckets.add(bucket); - if (buckets.size() >= size) { - break; + try (ObjectArray allBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < allBucketsPerOrd.size(); ordIdx++) { + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + List buckets = new ArrayList<>(); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + ordsEnum.readValue(spare); + InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( + BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. + docCount, + null, + keyed + ); + bucket.bucketOrd = ordsEnum.ord(); + buckets.add(bucket); + if (buckets.size() >= size) { + break; + } } + // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). + // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value + // changes not matching _tsid hash changes. Changes in _tsid hash are handled creating a new bucket as a result of making + // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is no true anymore + // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). 
+ buckets.sort(Comparator.comparing(bucket -> bucket.key)); + allBucketsPerOrd.set(ordIdx, buckets.toArray(new InternalTimeSeries.InternalBucket[0])); } - // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). - // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value - // changes not matching _tsid hash changes. Changes in _tsid hash are handled creating a new bucket as a result of making - // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is no true anymore - // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). - buckets.sort(Comparator.comparing(bucket -> bucket.key)); - allBucketsPerOrd[ordIdx] = buckets.toArray(new InternalTimeSeries.InternalBucket[0]); - } - buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(allBucketsPerOrd[ordIdx]); + InternalAggregation[] result = new InternalAggregation[Math.toIntExact(allBucketsPerOrd.size())]; + for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + result[ordIdx] = buildResult(allBucketsPerOrd.get(ordIdx)); + } + return result; } - return result; } @Override @@ -185,7 +189,7 @@ public void collect(int doc, long bucket) throws IOException { } InternalTimeSeries buildResult(InternalTimeSeries.InternalBucket[] topBuckets) { - return new InternalTimeSeries(name, List.of(topBuckets), keyed, metadata()); + return new InternalTimeSeries(name, Arrays.asList(topBuckets), keyed, metadata()); } @FunctionalInterface diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java index 6985f6da98cf1..12489ad37aabd 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.join.aggregations; import org.apache.lucene.search.Query; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -44,7 +45,7 @@ public ChildrenToParentAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalParent( diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 60412179807a5..1b99d2b34046c 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ 
b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -115,7 +116,7 @@ public void postCollection() throws IOException { } @Override - protected void prepareSubAggs(long[] ordsToCollect) throws IOException { + protected void prepareSubAggs(LongArray ordsToCollect) throws IOException { IndexReader indexReader = searcher().getIndexReader(); for (LeafReaderContext ctx : indexReader.leaves()) { Scorer childDocsScorer = outFilter.scorer(ctx); @@ -153,9 +154,10 @@ public float score() { * structure that maps a primitive long to a list of primitive * longs. */ - for (long owningBucketOrd : ordsToCollect) { - if (collectionStrategy.exists(owningBucketOrd, globalOrdinal)) { - collectBucket(sub, docId, owningBucketOrd); + for (long ord = 0; ord < ordsToCollect.size(); ord++) { + long ordToCollect = ordsToCollect.get(ord); + if (collectionStrategy.exists(ordToCollect, globalOrdinal)) { + collectBucket(sub, docId, ordToCollect); } } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java index d8a061a2de6d9..939107f87715d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.join.aggregations; import org.apache.lucene.search.Query; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -40,7 +41,7 @@ public ParentToChildrenAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalChildren( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index a1395f81eb091..67576059de1e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -669,7 +670,7 @@ public Aggregator subAggregator(String aggregatorName) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public 
InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[] { buildEmptyAggregation() }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java index b4d5512331b42..d08a76e51c6bd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.profile.aggregation.InternalAggregationProfileTree; @@ -98,10 +99,10 @@ public final void postCollection() throws IOException { } @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { InternalAggregation[] delegateResults = delegate.buildAggregations(owningBucketOrds); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { result[ordIdx] = adapt(delegateResults[ordIdx]); } return result; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index 0d36469dddfdc..aa8d9fba554c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.sort.SortOrder; @@ -142,7 +144,7 @@ public interface BucketComparator { * @return the results for each ordinal, in the same order as the array * of ordinals */ - public abstract InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException; + public abstract InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException; /** * Release this aggregation and its sub-aggregations. @@ -153,11 +155,11 @@ public interface BucketComparator { * Build the result of this aggregation if it is at the "top level" * of the aggregation tree. If, instead, it is a sub-aggregation of * another aggregation then the aggregation that contains it will call - * {@link #buildAggregations(long[])}. + * {@link #buildAggregations(LongArray)}. 
*/ public final InternalAggregation buildTopLevel() throws IOException { assert parent() == null; - return buildAggregations(new long[] { 0 })[0]; + return buildAggregations(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true))[0]; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java index 8accc6b15d820..4da2d10cfc0c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; @@ -39,9 +40,9 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { results[ordIdx] = buildEmptyAggregation(); } return results; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 231130c920349..44d76d31be0e7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -20,6 +20,7 @@ import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -146,7 +147,7 @@ public void postCollection() throws IOException { * Replay the wrapped collector, but only on a selection of buckets. */ @Override - public void prepareSelectedBuckets(long... selectedBuckets) throws IOException { + public void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException { if (finished == false) { throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called"); } @@ -154,9 +155,9 @@ public void prepareSelectedBuckets(long... 
selectedBuckets) throws IOException { throw new IllegalStateException("Already been replayed"); } - this.selectedBuckets = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE); - for (long ord : selectedBuckets) { - this.selectedBuckets.add(ord); + this.selectedBuckets = new LongHash(selectedBuckets.size(), BigArrays.NON_RECYCLING_INSTANCE); + for (long i = 0; i < selectedBuckets.size(); i++) { + this.selectedBuckets.add(selectedBuckets.get(i)); } boolean needsScores = scoreMode().needsScores(); @@ -232,21 +233,22 @@ private static void failInCaseOfBadScorer(String message) { * been collected directly. */ @Override - public Aggregator wrap(final Aggregator in) { + public Aggregator wrap(final Aggregator in, BigArrays bigArrays) { return new WrappedAggregator(in) { @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { if (selectedBuckets == null) { throw new IllegalStateException("Collection has not been replayed yet."); } - long[] rebasedOrds = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - rebasedOrds[ordIdx] = selectedBuckets.find(owningBucketOrds[ordIdx]); - if (rebasedOrds[ordIdx] == -1) { - throw new IllegalStateException("Cannot build for a bucket which has not been collected"); + try (LongArray rebasedOrds = bigArrays.newLongArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + rebasedOrds.set(ordIdx, selectedBuckets.find(owningBucketOrds.get(ordIdx))); + if (rebasedOrds.get(ordIdx) == -1) { + throw new IllegalStateException("Cannot build for a bucket which has not been collected"); + } } + return in.buildAggregations(rebasedOrds); } - return in.buildAggregations(rebasedOrds); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index e6c26c4278807..252eb0877d024 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -10,7 +10,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.Aggregator; @@ -155,22 +157,22 @@ public final long bucketDocCount(long bucketOrd) { /** * Hook to allow taking an action before building the sub agg results. */ - protected void prepareSubAggs(long[] ordsToCollect) throws IOException {} + protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {} /** * Build the results of the sub-aggregations of the buckets at each of * the provided ordinals. *
<p>
* Most aggregations should probably use something like - * {@link #buildSubAggsForAllBuckets(Object[][], ToLongFunction, BiConsumer)} - * or {@link #buildAggregationsForVariableBuckets(long[], LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)} - * or {@link #buildAggregationsForFixedBucketCount(long[], int, BucketBuilderForFixedCount, Function)} - * or {@link #buildAggregationsForSingleBucket(long[], SingleBucketResultBuilder)} + * {@link #buildSubAggsForAllBuckets(ObjectArray, ToLongFunction, BiConsumer)} + * or {@link #buildAggregationsForVariableBuckets(LongArray, LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)} + * or {@link #buildAggregationsForFixedBucketCount(LongArray, int, BucketBuilderForFixedCount, Function)} + * or {@link #buildAggregationsForSingleBucket(LongArray, SingleBucketResultBuilder)} * instead of calling this directly. * @return the sub-aggregation results in the same order as the provided * array of ordinals */ - protected final IntFunction buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException { + protected final IntFunction buildSubAggsForBuckets(LongArray bucketOrdsToCollect) throws IOException { prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { @@ -204,26 +206,28 @@ public int size() { * @param setAggs how to set the sub-aggregation results on a bucket */ protected final void buildSubAggsForAllBuckets( - B[][] buckets, + ObjectArray buckets, ToLongFunction bucketToOrd, BiConsumer setAggs ) throws IOException { - int totalBucketOrdsToCollect = 0; - for (B[] bucketsForOneResult : buckets) { - totalBucketOrdsToCollect += bucketsForOneResult.length; + long totalBucketOrdsToCollect = 0; + for (long b = 0; b < buckets.size(); b++) { + totalBucketOrdsToCollect += buckets.get(b).length; } - long[] bucketOrdsToCollect = new long[totalBucketOrdsToCollect]; - int s = 0; - for (B[] bucketsForOneResult : buckets) { - for (B bucket : bucketsForOneResult) { - bucketOrdsToCollect[s++] = bucketToOrd.applyAsLong(bucket); + + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalBucketOrdsToCollect)) { + int s = 0; + for (long ord = 0; ord < buckets.size(); ord++) { + for (B bucket : buckets.get(ord)) { + bucketOrdsToCollect.set(s++, bucketToOrd.applyAsLong(bucket)); + } } - } - var results = buildSubAggsForBuckets(bucketOrdsToCollect); - s = 0; - for (B[] bucket : buckets) { - for (int b = 0; b < bucket.length; b++) { - setAggs.accept(bucket[b], results.apply(s++)); + var results = buildSubAggsForBuckets(bucketOrdsToCollect); + s = 0; + for (long ord = 0; ord < buckets.size(); ord++) { + for (B value : buckets.get(ord)) { + setAggs.accept(value, results.apply(s++)); + } } } } @@ -237,37 +241,38 @@ protected final void buildSubAggsForAllBuckets( * @param resultBuilder how to build a result from buckets */ protected final InternalAggregation[] buildAggregationsForFixedBucketCount( - long[] owningBucketOrds, + LongArray owningBucketOrds, int bucketsPerOwningBucketOrd, BucketBuilderForFixedCount bucketBuilder, Function, InternalAggregation> resultBuilder ) throws IOException { - int totalBuckets = owningBucketOrds.length * bucketsPerOwningBucketOrd; - long[] bucketOrdsToCollect = new long[totalBuckets]; - int bucketOrdIdx = 0; - for (long owningBucketOrd : owningBucketOrds) { - long ord = owningBucketOrd * bucketsPerOwningBucketOrd; - for (int offsetInOwningOrd = 0; offsetInOwningOrd < 
bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - bucketOrdsToCollect[bucketOrdIdx++] = ord++; + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(owningBucketOrds.size() * bucketsPerOwningBucketOrd)) { + int bucketOrdIdx = 0; + for (long i = 0; i < owningBucketOrds.size(); i++) { + long ord = owningBucketOrds.get(i) * bucketsPerOwningBucketOrd; + for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + bucketOrdsToCollect.set(bucketOrdIdx++, ord++); + } } - } - bucketOrdIdx = 0; - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); - for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - buckets.add( - bucketBuilder.build( - offsetInOwningOrd, - bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]), - subAggregationResults.apply(bucketOrdIdx++) - ) - ); + bucketOrdIdx = 0; + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int owningOrdIdx = 0; owningOrdIdx < results.length; owningOrdIdx++) { + List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); + for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + buckets.add( + bucketBuilder.build( + offsetInOwningOrd, + bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx)), + subAggregationResults.apply(bucketOrdIdx++) + ) + ); + } + results[owningOrdIdx] = resultBuilder.apply(buckets); } - results[owningOrdIdx] = resultBuilder.apply(buckets); + return results; } - return results; } @FunctionalInterface @@ -280,17 +285,19 @@ protected interface BucketBuilderForFixedCount { * @param owningBucketOrds owning bucket ordinals for which to build the results * @param resultBuilder how to build a result from the sub aggregation results */ - protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, SingleBucketResultBuilder resultBuilder) - throws IOException { + protected final InternalAggregation[] buildAggregationsForSingleBucket( + LongArray owningBucketOrds, + SingleBucketResultBuilder resultBuilder + ) throws IOException { /* * It'd be entirely reasonable to call * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)` * here but we don't because single bucket aggs never have. 
*/ var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults.apply(ordIdx)); + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + results[ordIdx] = resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)); } return results; } @@ -307,54 +314,60 @@ protected interface SingleBucketResultBuilder { * @param bucketOrds hash of values to the bucket ordinal */ protected final InternalAggregation[] buildAggregationsForVariableBuckets( - long[] owningBucketOrds, + LongArray owningBucketOrds, LongKeyedBucketOrds bucketOrds, BucketBuilderForVariable bucketBuilder, ResultBuilderForVariable resultBuilder ) throws IOException { long totalOrdsToCollect = 0; - final int[] bucketsInOrd = new int[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - bucketsInOrd[ordIdx] = (int) bucketCount; - totalOrdsToCollect += bucketCount; - } - if (totalOrdsToCollect > Integer.MAX_VALUE) { - // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is not - // much the user can do about that. If this occurs with any frequency, we should do something about it. - throw new IllegalArgumentException( - "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]" - ); - } - long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect]; - int b = 0; - for (long owningBucketOrd : owningBucketOrds) { - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - while (ordsEnum.next()) { - bucketOrdsToCollect[b++] = ordsEnum.ord(); + try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)); + bucketsInOrd.set(ordIdx, (int) bucketCount); + totalOrdsToCollect += bucketCount; } - } - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - b = 0; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - List buckets = new ArrayList<>(bucketsInOrd[ordIdx]); - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - if (bucketOrdsToCollect[b] != ordsEnum.ord()) { - // If we hit this, something has gone horribly wrong and we need to investigate - throw AggregationErrors.iterationOrderChangedWithoutMutating( - bucketOrds.toString(), - ordsEnum.ord(), - bucketOrdsToCollect[b] - ); + if (totalOrdsToCollect > Integer.MAX_VALUE) { + // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is + // not + // much the user can do about that. If this occurs with any frequency, we should do something about it. 
+ throw new IllegalArgumentException( + "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]" + ); + } + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { + int b = 0; + for (long i = 0; i < owningBucketOrds.size(); i++) { + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); + while (ordsEnum.next()) { + bucketOrdsToCollect.set(b++, ordsEnum.ord()); + } } - buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++))); + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + b = 0; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + if (bucketOrdsToCollect.get(b) != ordsEnum.ord()) { + // If we hit this, something has gone horribly wrong and we need to investigate + throw AggregationErrors.iterationOrderChangedWithoutMutating( + bucketOrds.toString(), + ordsEnum.ord(), + bucketOrdsToCollect.get(b) + ); + } + buckets.add( + bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++)) + ); + } + results[ordIdx] = resultBuilder.build(owningBucketOrd, buckets); + } + return results; } - results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], buckets); } - return results; } @FunctionalInterface diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java index 84a15b6d1c0eb..64744b705e222 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketCollector; @@ -65,7 +66,7 @@ protected void doPreCollection() throws IOException { } deferredAggregations.add(subAggregators[i]); deferredAggregationNames.add(subAggregators[i].name()); - subAggregators[i] = deferringCollector.wrap(subAggregators[i]); + subAggregators[i] = deferringCollector.wrap(subAggregators[i], bigArrays()); } else { collectors.add(subAggregators[i]); } @@ -87,7 +88,7 @@ protected DeferringBucketCollector deferringCollector() { /** * Build the {@link DeferringBucketCollector}. The default implementation * replays all hits against the buckets selected by - * {#link {@link DeferringBucketCollector#prepareSelectedBuckets(long...)}. + * {#link {@link DeferringBucketCollector#prepareSelectedBuckets(LongArray)}. 
*/ protected DeferringBucketCollector buildDeferringCollector() { return new BestBucketsDeferringCollector(topLevelQuery(), searcher(), descendsFromGlobalAggregator(parent())); @@ -107,7 +108,7 @@ protected boolean shouldDefer(Aggregator aggregator) { } @Override - protected final void prepareSubAggs(long[] bucketOrdsToCollect) throws IOException { + protected final void prepareSubAggs(LongArray bucketOrdsToCollect) throws IOException { if (deferringCollector != null) { deferringCollector.prepareSelectedBuckets(bucketOrdsToCollect); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java index 44cff2651e273..468fec29a9420 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java @@ -10,6 +10,8 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.BucketCollector; @@ -37,13 +39,13 @@ public DeferringBucketCollector() {} /** * Replay the deferred hits on the selected buckets. */ - public abstract void prepareSelectedBuckets(long... selectedBuckets) throws IOException; + public abstract void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException; /** * Wrap the provided aggregator so that it behaves (almost) as if it had * been collected directly. 
     */
-    public Aggregator wrap(final Aggregator in) {
+    public Aggregator wrap(final Aggregator in, BigArrays bigArrays) {
         return new WrappedAggregator(in);
     }
 
@@ -80,7 +82,7 @@ public Aggregator subAggregator(String name) {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return in.buildAggregations(owningBucketOrds);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
index 9ee15306ce636..0baecf6e3f92b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java
@@ -35,6 +35,7 @@
 import org.apache.lucene.util.RoaringDocIdSet;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Rounding;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.IndexSortConfig;
@@ -184,50 +185,51 @@ protected void doPostCollection() throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         // Composite aggregator must be at the top of the aggregation tree
-        assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0L;
+        assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0L;
         if (deferredCollectors != NO_OP_BUCKET_COLLECTOR) {
             // Replay all documents that contain at least one top bucket (collected during the first pass).
             runDeferredCollections();
         }
-        int num = Math.min(size, (int) queue.size());
+        final int num = Math.min(size, (int) queue.size());
         final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num];
-        long[] bucketOrdsToCollect = new long[(int) queue.size()];
-        for (int i = 0; i < queue.size(); i++) {
-            bucketOrdsToCollect[i] = i;
-        }
-        var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
-        while (queue.size() > 0) {
-            int slot = queue.pop();
-            CompositeKey key = queue.toCompositeKey(slot);
-            InternalAggregations aggs = subAggsForBuckets.apply(slot);
-            long docCount = queue.getDocCount(slot);
-            buckets[(int) queue.size()] = new InternalComposite.InternalBucket(
-                sourceNames,
-                formats,
-                key,
-                reverseMuls,
-                missingOrders,
-                docCount,
-                aggs
-            );
+        try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(queue.size())) {
+            for (int i = 0; i < queue.size(); i++) {
+                bucketOrdsToCollect.set(i, i);
+            }
+            var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect);
+            while (queue.size() > 0) {
+                int slot = queue.pop();
+                CompositeKey key = queue.toCompositeKey(slot);
+                InternalAggregations aggs = subAggsForBuckets.apply(slot);
+                long docCount = queue.getDocCount(slot);
+                buckets[(int) queue.size()] = new InternalComposite.InternalBucket(
+                    sourceNames,
+                    formats,
+                    key,
+                    reverseMuls,
+                    missingOrders,
+                    docCount,
+                    aggs
+                );
+            }
+            CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null;
+            return new InternalAggregation[] {
+                new InternalComposite(
+                    name,
+                    size,
+                    sourceNames,
+                    formats,
+                    Arrays.asList(buckets),
+                    lastBucket,
+                    reverseMuls,
+                    missingOrders,
+                    earlyTerminated,
+                    metadata()
+                ) };
         }
-        CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null;
-        return new InternalAggregation[] {
-            new InternalComposite(
-                name,
-                size,
-                sourceNames,
-                formats,
-                Arrays.asList(buckets),
-                lastBucket,
-                reverseMuls,
-                missingOrders,
-                earlyTerminated,
-                metadata()
-            ) };
     }
 
     @Override
@@ -244,6 +246,7 @@ public InternalAggregation buildEmptyAggregation() {
             false,
             metadata()
         );
+
     }
 
     private void finishLeaf() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java
index af4d60bf424a7..05fce2cff64d5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java
@@ -13,6 +13,8 @@
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
@@ -108,70 +110,80 @@ private void collectOrdinal(long bucketOrdinal, int doc, LeafBucketCollector sub
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        StringTerms.Bucket[][] topBucketsPerOrd = new StringTerms.Bucket[owningBucketOrds.length][];
-        long[] otherDocCounts = new long[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize());
-
-            // as users can't control sort order, in practice we'll always sort by doc count descending
-            try (
-                BucketPriorityQueue<StringTerms.Bucket> ordered = new BucketPriorityQueue<>(
-                    size,
-                    bigArrays(),
-                    partiallyBuiltBucketComparator
-                )
-            ) {
-                StringTerms.Bucket spare = null;
-                BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
-                Supplier<StringTerms.Bucket> emptyBucketBuilder = () -> new StringTerms.Bucket(new BytesRef(), 0, null, false, 0, format);
-                while (ordsEnum.next()) {
-                    long docCount = bucketDocCount(ordsEnum.ord());
-                    otherDocCounts[ordIdx] += docCount;
-                    if (spare == null) {
-                        spare = emptyBucketBuilder.get();
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+        try (
+            LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size());
+            ObjectArray<StringTerms.Bucket[]> topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())
+        ) {
+            for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
+                int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize());
+
+                // as users can't control sort order, in practice we'll always sort by doc count descending
+                try (
+                    BucketPriorityQueue<StringTerms.Bucket> ordered = new BucketPriorityQueue<>(
+                        size,
+                        bigArrays(),
+                        partiallyBuiltBucketComparator
+                    )
+                ) {
+                    StringTerms.Bucket spare = null;
+                    BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx));
+                    Supplier<StringTerms.Bucket> emptyBucketBuilder = () -> new StringTerms.Bucket(
+                        new BytesRef(),
+                        0,
+                        null,
+                        false,
+                        0,
+                        format
+                    );
+                    while (ordsEnum.next()) {
+                        long docCount = bucketDocCount(ordsEnum.ord());
+                        otherDocCounts.increment(ordIdx, docCount);
+                        if (spare == null) {
+                            spare = emptyBucketBuilder.get();
+                        }
+                        ordsEnum.readValue(spare.getTermBytes());
+                        spare.setDocCount(docCount);
+                        spare.setBucketOrd(ordsEnum.ord());
+                        spare = ordered.insertWithOverflow(spare);
                     }
-                    ordsEnum.readValue(spare.getTermBytes());
-                    spare.setDocCount(docCount);
-                    spare.setBucketOrd(ordsEnum.ord());
-                    spare = ordered.insertWithOverflow(spare);
-                }
-                topBucketsPerOrd[ordIdx] = new StringTerms.Bucket[(int) ordered.size()];
-                for (int i = (int) ordered.size() - 1; i >= 0; --i) {
-                    topBucketsPerOrd[ordIdx][i] = ordered.pop();
-                    otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount();
-                    topBucketsPerOrd[ordIdx][i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd[ordIdx][i].getTermBytes()));
+                    topBucketsPerOrd.set(ordIdx, new StringTerms.Bucket[(int) ordered.size()]);
+                    for (int i = (int) ordered.size() - 1; i >= 0; --i) {
+                        topBucketsPerOrd.get(ordIdx)[i] = ordered.pop();
+                        otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount());
+                        topBucketsPerOrd.get(ordIdx)[i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd.get(ordIdx)[i].getTermBytes()));
+                    }
                 }
             }
-        }
-        buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations);
-        InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            final BucketOrder reduceOrder;
-            if (isKeyOrder(order) == false) {
-                reduceOrder = InternalOrder.key(true);
-                Arrays.sort(topBucketsPerOrd[ordIdx], reduceOrder.comparator());
-            } else {
-                reduceOrder = order;
+            buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations);
+            InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())];
+            for (int ordIdx = 0; ordIdx < result.length; ordIdx++) {
+                final BucketOrder reduceOrder;
+                if (isKeyOrder(order) == false) {
+                    reduceOrder = InternalOrder.key(true);
+                    Arrays.sort(topBucketsPerOrd.get(ordIdx), reduceOrder.comparator());
+                } else {
+                    reduceOrder = order;
+                }
+                result[ordIdx] = new StringTerms(
+                    name,
+                    reduceOrder,
+                    order,
+                    bucketCountThresholds.getRequiredSize(),
+                    bucketCountThresholds.getMinDocCount(),
+                    metadata(),
+                    format,
+                    bucketCountThresholds.getShardSize(),
+                    false,
+                    otherDocCounts.get(ordIdx),
+                    Arrays.asList(topBucketsPerOrd.get(ordIdx)),
+                    null
+                );
             }
-            result[ordIdx] = new StringTerms(
-                name,
-                reduceOrder,
-                order,
-                bucketCountThresholds.getRequiredSize(),
-                bucketCountThresholds.getMinDocCount(),
-                metadata(),
-                format,
-                bucketCountThresholds.getShardSize(),
-                false,
-                otherDocCounts[ordIdx],
-                Arrays.asList(topBucketsPerOrd[ordIdx]),
-                null
-            );
+            return result;
         }
-        return result;
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
index fede97c7fddee..69eff3630a8f4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
@@ -208,7 +209,7 @@ List filters() {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForFixedBucketCount(
             owningBucketOrds,
             filters.size() + (otherBucketKey == null ? 0 : 1),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java
index cde26bb2214ed..0e63e26e77a55 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java
@@ -12,6 +12,8 @@
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.search.ScoreMode;
+import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -132,39 +134,40 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
 
     protected abstract InternalGeoGridBucket newEmptyBucket();
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        InternalGeoGridBucket[][] topBucketsPerOrd = new InternalGeoGridBucket[owningBucketOrds.length][];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]), shardSize);
-
-            try (BucketPriorityQueue<InternalGeoGridBucket> ordered = new BucketPriorityQueue<>(size, bigArrays())) {
-                InternalGeoGridBucket spare = null;
-                LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
-                while (ordsEnum.next()) {
-                    if (spare == null) {
-                        spare = newEmptyBucket();
-                    }
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+        try (ObjectArray<InternalGeoGridBucket[]> topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) {
+            for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
+                int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize);
+
+                try (BucketPriorityQueue<InternalGeoGridBucket> ordered = new BucketPriorityQueue<>(size, bigArrays())) {
+                    InternalGeoGridBucket spare = null;
+                    LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx));
+                    while (ordsEnum.next()) {
+                        if (spare == null) {
+                            spare = newEmptyBucket();
+                        }
 
-                    // need a special function to keep the source bucket
-                    // up-to-date so it can get the appropriate key
-                    spare.hashAsLong = ordsEnum.value();
-                    spare.docCount = bucketDocCount(ordsEnum.ord());
-                    spare.bucketOrd = ordsEnum.ord();
-                    spare = ordered.insertWithOverflow(spare);
-                }
+                        // need a special function to keep the source bucket
+                        // up-to-date so it can get the appropriate key
+                        spare.hashAsLong = ordsEnum.value();
+                        spare.docCount = bucketDocCount(ordsEnum.ord());
+                        spare.bucketOrd = ordsEnum.ord();
+                        spare = ordered.insertWithOverflow(spare);
+                    }
 
-                topBucketsPerOrd[ordIdx] = new InternalGeoGridBucket[(int) ordered.size()];
-                for (int i = (int) ordered.size() - 1; i >= 0; --i) {
-                    topBucketsPerOrd[ordIdx][i] = ordered.pop();
+                    topBucketsPerOrd.set(ordIdx, new InternalGeoGridBucket[(int) ordered.size()]);
+                    for (int i = (int) ordered.size() - 1; i >= 0; --i) {
+                        topBucketsPerOrd.get(ordIdx)[i] = ordered.pop();
+                    }
                 }
             }
+            buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
+            InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())];
+            for (int ordIdx = 0; ordIdx < results.length; ordIdx++) {
+                results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata());
+            }
+            return results;
         }
-        buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd[ordIdx]), metadata());
-        }
-        return results;
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java
index b5d3485e72f82..b83001c34377e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java
@@ -14,6 +14,7 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Scorable;
 import org.apache.lucene.search.Weight;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.CardinalityUpperBound;
@@ -62,8 +63,8 @@ public void setScorer(Scorable scorer) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0 : "global aggregator can only be a top level aggregator";
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+        assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0 : "global aggregator can only be a top level aggregator";
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalGlobal(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
index b81d8b002b6b2..ed687df6377dd 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
@@ -10,6 +10,7 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;
 
 import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -79,7 +80,7 @@ public AbstractHistogramAggregator(
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> {
             double roundKey = Double.longBitsToDouble(bucketValue);
             double key = roundKey * interval + offset;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
index 86c320d8dc319..cc2db63fa5ec5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
@@ -17,6 +17,7 @@
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.Rounding.DateTimeUnit;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasables;
@@ -337,7 +338,7 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> {
             return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults);
         }, (owningBucketOrd, buckets) -> {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
index 2bfd85e5fe03a..f385f7c34f6b7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
@@ -12,6 +12,7 @@
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.common.Rounding;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.index.fielddata.FieldData;
@@ -163,7 +164,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForVariableBuckets(
             owningBucketOrds,
             bucketOrds,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
index 1afb06067f770..86ec1666e2cea 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java
@@ -14,6 +14,7 @@
 import org.apache.lucene.util.InPlaceMergeSorter;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.DoubleArray;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
@@ -565,34 +566,35 @@ public void collect(int doc, long bucket) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         int numClusters = collector.finalNumBuckets();
-        long[] bucketOrdsToCollect = new long[numClusters];
-        for (int i = 0; i < numClusters; i++) {
-            bucketOrdsToCollect[i] = i;
-        }
+        try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(numClusters)) {
+            for (int i = 0; i < numClusters; i++) {
+                bucketOrdsToCollect.set(i, i);
+            }
 
-        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
+            var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
 
-        List<InternalVariableWidthHistogram.Bucket> buckets = new ArrayList<>(numClusters);
-        for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) {
-            buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd)));
-        }
+            List<InternalVariableWidthHistogram.Bucket> buckets = new ArrayList<>(numClusters);
+            for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) {
+                buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd)));
+            }
 
-        Function<List<InternalVariableWidthHistogram.Bucket>, InternalAggregation> resultBuilder = bucketsToFormat -> {
-            // The contract of the histogram aggregation is that shards must return
-            // buckets ordered by centroid in ascending order
-            CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator());
+            Function<List<InternalVariableWidthHistogram.Bucket>, InternalAggregation> resultBuilder = bucketsToFormat -> {
+                // The contract of the histogram aggregation is that shards must return
+                // buckets ordered by centroid in ascending order
+                CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator());
 
-            InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo(
-                buildEmptySubAggregations()
-            );
+                InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo(
+                    buildEmptySubAggregations()
+                );
 
-            return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata());
-        };
+                return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata());
+            };
 
-        return new InternalAggregation[] { resultBuilder.apply(buckets) };
+            return new InternalAggregation[] { resultBuilder.apply(buckets) };
+        }
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
index 5c8f8ab9c562e..b49668e45b889 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
@@ -8,6 +8,7 @@
  */
 package org.elasticsearch.search.aggregations.bucket.missing;
 
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.fielddata.DocValueBits;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -67,7 +68,7 @@ public void collect(int doc, long bucket) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalMissing(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
index 0fbb9745aa400..23a2d6380c290 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java
@@ -21,6 +21,7 @@
 import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.mapper.NestedObjectMapper;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -124,7 +125,7 @@ private void processBufferedDocs() throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalNested(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
index 0e3e4679c7a2d..2477b67367e14 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java
@@ -13,6 +13,7 @@
 import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.mapper.NestedObjectMapper;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -86,7 +87,7 @@ public void collect(int childDoc, long bucket) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalReverseNested(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
index 9548cd871e161..e8ba0393208a0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
@@ -12,6 +12,8 @@
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.common.util.IntArray;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.index.fielddata.FieldData;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
@@ -160,57 +162,63 @@ private static void maskIpAddress(final BytesRef ipAddress, final BytesRef subne
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         long totalOrdsToCollect = 0;
-        final int[] bucketsInOrd = new int[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]);
-            bucketsInOrd[ordIdx] = (int) bucketCount;
-            totalOrdsToCollect += bucketCount;
-        }
-
-        long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect];
-        int b = 0;
-        for (long owningBucketOrd : owningBucketOrds) {
-            BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd);
-            while (ordsEnum.next()) {
-                bucketOrdsToCollect[b++] = ordsEnum.ord();
+        try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) {
+            for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) {
+                final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx));
+                bucketsInOrd.set(ordIdx, (int) bucketCount);
+                totalOrdsToCollect += bucketCount;
             }
-        }
-        var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        b = 0;
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            List<InternalIpPrefix.Bucket> buckets = new ArrayList<>(bucketsInOrd[ordIdx]);
-            BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
-            while (ordsEnum.next()) {
-                long ordinal = ordsEnum.ord();
-                if (bucketOrdsToCollect[b] != ordinal) {
-                    throw AggregationErrors.iterationOrderChangedWithoutMutating(bucketOrds.toString(), ordinal, bucketOrdsToCollect[b]);
+            try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) {
+                int b = 0;
+                for (long i = 0; i < owningBucketOrds.size(); i++) {
+                    BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i));
+                    while (ordsEnum.next()) {
+                        bucketOrdsToCollect.set(b++, ordsEnum.ord());
+                    }
+                }
+
+                var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect);
+                InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())];
+                b = 0;
+                for (int ordIdx = 0; ordIdx < results.length; ordIdx++) {
+                    List<InternalIpPrefix.Bucket> buckets = new ArrayList<>(bucketsInOrd.get(ordIdx));
+                    BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx));
+                    while (ordsEnum.next()) {
+                        long ordinal = ordsEnum.ord();
+                        if (bucketOrdsToCollect.get(b) != ordinal) {
+                            throw AggregationErrors.iterationOrderChangedWithoutMutating(
+                                bucketOrds.toString(),
+                                ordinal,
+                                bucketOrdsToCollect.get(b)
+                            );
+                        }
+                        BytesRef ipAddress = new BytesRef();
+                        ordsEnum.readValue(ipAddress);
+                        long docCount = bucketDocCount(ordinal);
+                        buckets.add(
+                            new InternalIpPrefix.Bucket(
+                                config.format(),
+                                BytesRef.deepCopyOf(ipAddress),
+                                keyed,
+                                ipPrefix.isIpv6,
+                                ipPrefix.prefixLength,
+                                ipPrefix.appendPrefixLength,
+                                docCount,
+                                subAggregationResults.apply(b++)
+                            )
+                        );
+
+                        // NOTE: the aggregator is expected to return sorted results
+                        CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
+                    }
+                    results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata());
                 }
-                BytesRef ipAddress = new BytesRef();
-                ordsEnum.readValue(ipAddress);
-                long docCount = bucketDocCount(ordinal);
-                buckets.add(
-                    new InternalIpPrefix.Bucket(
-                        config.format(),
-                        BytesRef.deepCopyOf(ipAddress),
-                        keyed,
-                        ipPrefix.isIpv6,
-                        ipPrefix.prefixLength,
-                        ipPrefix.appendPrefixLength,
-                        docCount,
-                        subAggregationResults.apply(b++)
-                    )
-                );
-
-                // NOTE: the aggregator is expected to return sorted results
-                CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
+                return results;
             }
-            results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata());
         }
-        return results;
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
index 6119af3cb6a57..9bde8d007c1b7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
@@ -14,6 +14,7 @@
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.fielddata.FieldData;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.search.DocValueFormat;
@@ -359,7 +360,7 @@ private interface DocCollector {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForFixedBucketCount(
             owningBucketOrds,
             ranges.length,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
index 6d63bb786c29f..0654a788a10a9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.index.fielddata.FieldData;
 import org.elasticsearch.index.fielddata.NumericDoubleValues;
@@ -531,7 +532,7 @@ protected long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) {
     }
 
     @Override
     @SuppressWarnings("unchecked")
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForFixedBucketCount(
             owningBucketOrds,
             ranges.length,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java
index 37cee75c11b48..70f72fafba7b5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java
@@ -19,6 +19,7 @@
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
@@ -120,7 +121,7 @@ public void postCollection() throws IOException {
     }
 
     @Override
-    public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
+    public void prepareSelectedBuckets(LongArray selectedBuckets) {
         // no-op - deferred aggs processed in postCollection call
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
index 78b2cdfe7655d..a4c06a194fbf7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
@@ -11,6 +11,7 @@
 import org.apache.lucene.misc.search.DiversifiedTopDocsCollector;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.util.RamUsageEstimator;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
@@ -212,7 +213,7 @@ protected boolean shouldDefer(Aggregator aggregator) {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalSampler(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
index fc03786356f87..921cbb96385ad 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
@@ -15,6 +15,7 @@
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.common.CheckedSupplier;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -60,7 +61,7 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalRandomSampler(
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index 0f7c61dc9f25b..d04d7528ea938 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.LongHash;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.util.ObjectArrayPriorityQueue;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Releasable;
@@ -191,7 +192,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return resultStrategy.buildAggregations(owningBucketOrds);
     }
 
@@ -696,61 +697,66 @@ abstract class ResultStrategy<
         B extends InternalMultiBucketAggregation.InternalBucket,
         TB extends InternalMultiBucketAggregation.InternalBucket> implements Releasable {
 
-        private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+        private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+
             if (valueCount == 0) {
                 // no context in this reader
-                InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-                for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                    results[ordIdx] = buildNoValuesResult(owningBucketOrds[ordIdx]);
+                InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())];
+                for (int ordIdx = 0; ordIdx < results.length; ordIdx++) {
+                    results[ordIdx] = buildNoValuesResult(owningBucketOrds.get(ordIdx));
                 }
                 return results;
            }
-
-            B[][] topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.length);
-            long[] otherDocCount = new long[owningBucketOrds.length];
-            GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd;
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                final int size;
-                if (bucketCountThresholds.getMinDocCount() == 0) {
-                    // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns
-                    size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
-                } else {
-                    size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
-                }
-                try (ObjectArrayPriorityQueue<TB> ordered = buildPriorityQueue(size)) {
-                    final int finalOrdIdx = ordIdx;
-                    BucketUpdater<TB> updater = bucketUpdater(owningBucketOrds[ordIdx], lookupGlobalOrd);
-                    collectionStrategy.forEach(owningBucketOrds[ordIdx], new BucketInfoConsumer() {
-                        TB spare = null;
-
-                        @Override
-                        public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException {
-                            otherDocCount[finalOrdIdx] += docCount;
-                            if (docCount >= bucketCountThresholds.getShardMinDocCount()) {
-                                if (spare == null) {
-                                    spare = buildEmptyTemporaryBucket();
+            try (
+                LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true);
+                ObjectArray<B[]> topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.size())
+            ) {
+                GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd;
+                for (long ordIdx = 0; ordIdx < topBucketsPreOrd.size(); ordIdx++) {
+                    final int size;
+                    if (bucketCountThresholds.getMinDocCount() == 0) {
+                        // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns
+                        size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
+                    } else {
+                        size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
+                    }
+                    try (ObjectArrayPriorityQueue<TB> ordered = buildPriorityQueue(size)) {
+                        final long finalOrdIdx = ordIdx;
+                        final long owningBucketOrd = owningBucketOrds.get(ordIdx);
+                        BucketUpdater<TB> updater = bucketUpdater(owningBucketOrd, lookupGlobalOrd);
+                        collectionStrategy.forEach(owningBucketOrd, new BucketInfoConsumer() {
+                            TB spare = null;
+
+                            @Override
+                            public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException {
+                                otherDocCount.increment(finalOrdIdx, docCount);
+                                if (docCount >= bucketCountThresholds.getShardMinDocCount()) {
+                                    if (spare == null) {
+                                        spare = buildEmptyTemporaryBucket();
+                                    }
+                                    updater.updateBucket(spare, globalOrd, bucketOrd, docCount);
+                                    spare = ordered.insertWithOverflow(spare);
                                 }
-                                updater.updateBucket(spare, globalOrd, bucketOrd, docCount);
-                                spare = ordered.insertWithOverflow(spare);
                             }
+                        });
+
+                        // Get the top buckets
+                        topBucketsPreOrd.set(ordIdx, buildBuckets((int) ordered.size()));
+                        for (int i = (int) ordered.size() - 1; i >= 0; --i) {
+                            B bucket = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd);
+                            topBucketsPreOrd.get(ordIdx)[i] = bucket;
+                            otherDocCount.increment(ordIdx, -bucket.getDocCount());
                         }
-                    });
-
-                    // Get the top buckets
-                    topBucketsPreOrd[ordIdx] = buildBuckets((int) ordered.size());
-                    for (int i = (int) ordered.size() - 1; i >= 0; --i) {
-                        topBucketsPreOrd[ordIdx][i] = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd);
-                        otherDocCount[ordIdx] -= topBucketsPreOrd[ordIdx][i].getDocCount();
                     }
                 }
-            }
-            buildSubAggs(topBucketsPreOrd);
+                buildSubAggs(topBucketsPreOrd);
 
-            InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                results[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCount[ordIdx], topBucketsPreOrd[ordIdx]);
+                InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPreOrd.size())];
+                for (int ordIdx = 0; ordIdx < results.length; ordIdx++) {
+                    results[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx));
+                }
+                return results;
             }
-            return results;
         }
 
         /**
@@ -785,7 +791,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep
         /**
         * Build an array to hold the "top" buckets for each ordinal.
         */
-        abstract B[][] buildTopBucketsPerOrd(int size);
+        abstract ObjectArray<B[]> buildTopBucketsPerOrd(long size);
 
         /**
         * Build an array of buckets for a particular ordinal to collect the
@@ -802,7 +808,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep
         * Build the sub-aggregations into the buckets. This will usually
         * delegate to {@link #buildSubAggsForAllBuckets}.
         */
-        abstract void buildSubAggs(B[][] topBucketsPreOrd) throws IOException;
+        abstract void buildSubAggs(ObjectArray<B[]> topBucketsPreOrd) throws IOException;
 
         /**
         * Turn the buckets into an aggregation result.
@@ -841,8 +847,8 @@ LeafBucketCollector wrapCollector(LeafBucketCollector primary) {
         }
 
         @Override
-        StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new StringTerms.Bucket[size][];
+        ObjectArray<StringTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -879,7 +885,7 @@ StringTerms.Bucket convertTempBucketToRealBucket(OrdBucket temp, GlobalOrdLookup
         }
 
         @Override
-        void buildSubAggs(StringTerms.Bucket[][] topBucketsPreOrd) throws IOException {
+        void buildSubAggs(ObjectArray<StringTerms.Bucket[]> topBucketsPreOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
         }
 
@@ -973,8 +979,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
         }
 
         @Override
-        SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new SignificantStringTerms.Bucket[size][];
+        ObjectArray<SignificantStringTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -1026,7 +1032,7 @@ SignificantStringTerms.Bucket convertTempBucketToRealBucket(
         }
 
         @Override
-        void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPreOrd) throws IOException {
+        void buildSubAggs(ObjectArray<SignificantStringTerms.Bucket[]> topBucketsPreOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
         }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
index a60911b466847..eeb7305ac51fa 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
@@ -62,7 +62,7 @@ public interface Reader> {
         long supersetSize;
         /**
          * Ordinal of the bucket while it is being built. Not used after it is
-         * returned from {@link Aggregator#buildAggregations(long[])} and not
+         * returned from {@link Aggregator#buildAggregations(org.elasticsearch.common.util.LongArray)} and not
         * serialized.
         */
         transient long bucketOrd;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java
index 651705bd71ef8..877bd2cac4b05 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java
@@ -12,7 +12,9 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.common.util.LongHash;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.DocValueFormat;
@@ -118,70 +120,75 @@ private void collectValue(long val, int docId, long owningBucketOrd, LeafBucketC
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         /*
          * Collect the list of buckets, populate the filter with terms
         * that are too frequent, and figure out how to merge sub-buckets.
         */
-        LongRareTerms.Bucket[][] rarestPerOrd = new LongRareTerms.Bucket[owningBucketOrds.length][];
-        SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length];
-        long keepCount = 0;
-        long[] mergeMap = new long[(int) bucketOrds.size()];
-        Arrays.fill(mergeMap, -1);
-        long offset = 0;
-        for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) {
-            try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) {
-                filters[owningOrdIdx] = newFilter();
-                List<LongRareTerms.Bucket> builtBuckets = new ArrayList<>();
-                LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]);
-                while (collectedBuckets.next()) {
-                    long docCount = bucketDocCount(collectedBuckets.ord());
-                    // if the key is below threshold, reinsert into the new ords
-                    if (docCount <= maxDocCount) {
-                        LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format);
-                        bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value());
-                        mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd;
-                        builtBuckets.add(bucket);
-                        keepCount++;
-                    } else {
-                        filters[owningOrdIdx].add(collectedBuckets.value());
+        try (
+            ObjectArray<LongRareTerms.Bucket[]> rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size());
+            ObjectArray<SetBackedScalingCuckooFilter> filters = bigArrays().newObjectArray(owningBucketOrds.size())
+        ) {
+            try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) {
+                mergeMap.fill(0, mergeMap.size(), -1);
+                long keepCount = 0;
+                long offset = 0;
+                for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) {
+                    try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) {
+                        filters.set(owningOrdIdx, newFilter());
+                        List<LongRareTerms.Bucket> builtBuckets = new ArrayList<>();
+                        LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx));
+                        while (collectedBuckets.next()) {
+                            long docCount = bucketDocCount(collectedBuckets.ord());
+                            // if the key is below threshold, reinsert into the new ords
+                            if (docCount <= maxDocCount) {
+                                LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format);
+                                bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value());
+                                mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd);
+                                builtBuckets.add(bucket);
+                                keepCount++;
+                            } else {
+                                filters.get(owningOrdIdx).add(collectedBuckets.value());
+                            }
+                        }
+                        rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(LongRareTerms.Bucket[]::new));
+                        offset += bucketsInThisOwningBucketToCollect.size();
                     }
                 }
-                rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(LongRareTerms.Bucket[]::new);
-                offset += bucketsInThisOwningBucketToCollect.size();
-            }
-        }
 
-        /*
-         * Only merge/delete the ordinals if we have actually deleted one,
-         * to save on some redundant work.
-         */
-        if (keepCount != mergeMap.length) {
-            LongUnaryOperator howToMerge = b -> mergeMap[(int) b];
-            rewriteBuckets(offset, howToMerge);
-            if (deferringCollector() != null) {
-                ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge);
+                /*
+                 * Only merge/delete the ordinals if we have actually deleted one,
+                 * to save on some redundant work.
+                 */
+                if (keepCount != mergeMap.size()) {
+                    LongUnaryOperator howToMerge = mergeMap::get;
+                    rewriteBuckets(offset, howToMerge);
+                    if (deferringCollector() != null) {
+                        ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge);
+                    }
+                }
             }
-        }
 
-        /*
-         * Now build the results!
-         */
-        buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
-        InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator());
-            result[ordIdx] = new LongRareTerms(
-                name,
-                ORDER,
-                metadata(),
-                format,
-                Arrays.asList(rarestPerOrd[ordIdx]),
-                maxDocCount,
-                filters[ordIdx]
-            );
+            /*
+             * Now build the results!
+             */
+            buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
+            InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())];
+            for (int ordIdx = 0; ordIdx < result.length; ordIdx++) {
+                LongRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx);
+                Arrays.sort(buckets, ORDER.comparator());
+                result[ordIdx] = new LongRareTerms(
+                    name,
+                    ORDER,
+                    metadata(),
+                    format,
+                    Arrays.asList(buckets),
+                    maxDocCount,
+                    filters.get(ordIdx)
+                );
+            }
+            return result;
         }
-        return result;
     }
 
    @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java
index 76202b6386a73..c02ed5509e6ae 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java
@@ -18,6 +18,7 @@
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.PriorityQueue;
 import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.util.ObjectArrayPriorityQueue;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
@@ -117,7 +118,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx,
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return resultStrategy.buildAggregations(owningBucketOrds);
     }
 
@@ -282,45 +283,49 @@ abstract class ResultStrategy
 
-        private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-            B[][] topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.length);
-            long[] otherDocCounts = new long[owningBucketOrds.length];
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                collectZeroDocEntriesIfNeeded(owningBucketOrds[ordIdx], excludeDeletedDocs);
-                int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize());
-
-                try (ObjectArrayPriorityQueue<B> ordered = buildPriorityQueue(size)) {
-                    B spare = null;
-                    BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
-                    Supplier<B> emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]);
-                    while (ordsEnum.next()) {
-                        long docCount = bucketDocCount(ordsEnum.ord());
-                        otherDocCounts[ordIdx] += docCount;
-                        if (docCount < bucketCountThresholds.getShardMinDocCount()) {
-                            continue;
-                        }
-                        if (spare == null) {
-                            spare = emptyBucketBuilder.get();
+        private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+            try (
+                LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true);
+                ObjectArray<B[]> topBucketsPerOrd = buildTopBucketsPerOrd(Math.toIntExact(owningBucketOrds.size()))
+            ) {
+                for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
+                    long owningOrd = owningBucketOrds.get(ordIdx);
+                    collectZeroDocEntriesIfNeeded(owningOrd, excludeDeletedDocs);
+                    int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize());
+
+                    try (ObjectArrayPriorityQueue<B> ordered = buildPriorityQueue(size)) {
+                        B spare = null;
+                        BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd);
+                        Supplier<B> emptyBucketBuilder = emptyBucketBuilder(owningOrd);
+                        while (ordsEnum.next()) {
+                            long docCount = bucketDocCount(ordsEnum.ord());
+                            otherDocCounts.increment(ordIdx, docCount);
+                            if (docCount < bucketCountThresholds.getShardMinDocCount()) {
+                                continue;
+                            }
+                            if (spare == null) {
+                                spare = emptyBucketBuilder.get();
+                            }
+                            updateBucket(spare, ordsEnum, docCount);
+                            spare = ordered.insertWithOverflow(spare);
                         }
-                        updateBucket(spare, ordsEnum, docCount);
-                        spare = ordered.insertWithOverflow(spare);
-                    }
 
-                    topBucketsPerOrd[ordIdx] = buildBuckets((int) ordered.size());
-                    for (int i = (int) ordered.size() - 1; i >= 0; --i) {
-                        topBucketsPerOrd[ordIdx][i] = ordered.pop();
-                        otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount();
-                        finalizeBucket(topBucketsPerOrd[ordIdx][i]);
+                        topBucketsPerOrd.set(ordIdx, buildBuckets((int) ordered.size()));
+                        for (int i = (int) ordered.size() - 1; i >= 0; --i) {
+                            topBucketsPerOrd.get(ordIdx)[i] = ordered.pop();
+                            otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount());
+                            finalizeBucket(topBucketsPerOrd.get(ordIdx)[i]);
+                        }
                     }
                 }
-            }
 
-            buildSubAggs(topBucketsPerOrd);
-            InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length];
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]);
+                buildSubAggs(topBucketsPerOrd);
+                InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())];
+                for (int ordIdx = 0; ordIdx < result.length; ordIdx++) {
+                    result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx));
+                }
+                return result;
             }
-            return result;
         }
 
         /**
@@ -361,7 +366,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws
         /**
         * Build an array to hold the "top" buckets for each ordinal.
         */
-        abstract B[][] buildTopBucketsPerOrd(int size);
+        abstract ObjectArray<B[]> buildTopBucketsPerOrd(long size);
 
         /**
         * Build an array of buckets for a particular ordinal to collect the
@@ -379,7 +384,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws
         * Build the sub-aggregations into the buckets. This will usually
         * delegate to {@link #buildSubAggsForAllBuckets}.
         */
-        abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException;
+        abstract void buildSubAggs(ObjectArray<B[]> topBucketsPerOrd) throws IOException;
 
         /**
         * Turn the buckets into an aggregation result.
@@ -501,8 +506,8 @@ void updateBucket(StringTerms.Bucket spare, BytesKeyedBucketOrds.BucketOrdsEnum
         }
 
         @Override
-        StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new StringTerms.Bucket[size][];
+        ObjectArray<StringTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -521,7 +526,7 @@ void finalizeBucket(StringTerms.Bucket bucket) {
         }
 
         @Override
-        void buildSubAggs(StringTerms.Bucket[][] topBucketsPerOrd) throws IOException {
+        void buildSubAggs(ObjectArray<StringTerms.Bucket[]> topBucketsPerOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a);
         }
 
@@ -637,8 +642,8 @@ void updateBucket(SignificantStringTerms.Bucket spare, BytesKeyedBucketOrds.Buck
         }
 
         @Override
-        SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new SignificantStringTerms.Bucket[size][];
+        ObjectArray<SignificantStringTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -657,7 +662,7 @@ void finalizeBucket(SignificantStringTerms.Bucket bucket) {
         }
 
         @Override
-        void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPerOrd) throws IOException {
+        void buildSubAggs(ObjectArray<SignificantStringTerms.Bucket[]> topBucketsPerOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a);
         }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java
index d39348d80df14..e10f0b8944027 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java
@@ -15,6 +15,7 @@
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.util.ObjectArrayPriorityQueue;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
@@ -39,7 +40,6 @@
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.List;
 import java.util.Map;
 import java.util.function.BiConsumer;
 import java.util.function.Function;
@@ -136,7 +136,7 @@ private void collectValue(long val, int doc, long owningBucketOrd, LeafBucketCol
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return resultStrategy.buildAggregations(owningBucketOrds);
     }
 
@@ -163,48 +163,52 @@ public void collectDebugInfo(BiConsumer add) {
     abstract class ResultStrategy implements Releasable {
 
-        private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-            B[][] topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.length);
-            long[] otherDocCounts = new long[owningBucketOrds.length];
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                collectZeroDocEntriesIfNeeded(owningBucketOrds[ordIdx], excludeDeletedDocs);
-                long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]);
-
-                int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize());
-                try (ObjectArrayPriorityQueue<B> ordered = buildPriorityQueue(size)) {
-                    B spare = null;
-                    BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]);
-                    Supplier<B> emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]);
-                    while (ordsEnum.next()) {
-                        long docCount = bucketDocCount(ordsEnum.ord());
-                        otherDocCounts[ordIdx] += docCount;
-                        if (docCount < bucketCountThresholds.getShardMinDocCount()) {
-                            continue;
-                        }
-                        if (spare == null) {
-                            spare = emptyBucketBuilder.get();
+        private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
+            try (
+                LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true);
+                ObjectArray<B[]> topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.size())
+            ) {
+                for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
+                    final long owningBucketOrd = owningBucketOrds.get(ordIdx);
+                    collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs);
+                    long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd);
+
+                    int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize());
+                    try (ObjectArrayPriorityQueue<B> ordered = buildPriorityQueue(size)) {
+                        B spare = null;
+                        BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd);
+                        Supplier<B> emptyBucketBuilder = emptyBucketBuilder(owningBucketOrd);
+                        while (ordsEnum.next()) {
+                            long docCount = bucketDocCount(ordsEnum.ord());
+                            otherDocCounts.increment(ordIdx, docCount);
+                            if (docCount < bucketCountThresholds.getShardMinDocCount()) {
+                                continue;
+                            }
+                            if (spare == null) {
+                                spare = emptyBucketBuilder.get();
+                            }
+                            updateBucket(spare, ordsEnum, docCount);
+                            spare = ordered.insertWithOverflow(spare);
                         }
-                        updateBucket(spare, ordsEnum, docCount);
-                        spare = ordered.insertWithOverflow(spare);
-                    }
 
-                    // Get the top buckets
-                    B[] bucketsForOrd = buildBuckets((int) ordered.size());
-                    topBucketsPerOrd[ordIdx] = bucketsForOrd;
-                    for (int b = (int) ordered.size() - 1; b >= 0; --b) {
-                        topBucketsPerOrd[ordIdx][b] = ordered.pop();
-                        otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount();
+                        // Get the top buckets
+                        B[] bucketsForOrd = buildBuckets((int) ordered.size());
+                        topBucketsPerOrd.set(ordIdx, bucketsForOrd);
+                        for (int b = (int) ordered.size() - 1; b >= 0; --b) {
+                            topBucketsPerOrd.get(ordIdx)[b] = ordered.pop();
+                            otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[b].getDocCount());
+                        }
                     }
                 }
-            }
-            buildSubAggs(topBucketsPerOrd);
+                buildSubAggs(topBucketsPerOrd);
 
-            InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length];
-            for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-                result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]);
+                InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())];
+                for (int ordIdx = 0; ordIdx < result.length; ordIdx++) {
+                    result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx));
+                }
+                return result;
             }
-            return result;
         }
 
         /**
@@ -227,7 +231,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws
         /**
         * Build an array to hold the "top" buckets for each ordinal.
         */
-        abstract B[][] buildTopBucketsPerOrd(int size);
+        abstract ObjectArray<B[]> buildTopBucketsPerOrd(long size);
 
         /**
         * Build an array of buckets for a particular ordinal. These arrays
@@ -258,7 +262,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws
         * Build the sub-aggregations into the buckets. This will usually
         * delegate to {@link #buildSubAggsForAllBuckets}.
         */
-        abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException;
+        abstract void buildSubAggs(ObjectArray<B[]> topBucketsPerOrd) throws IOException;
 
         /**
         * Collect extra entries for "zero" hit documents if they were requested
@@ -297,7 +301,7 @@ final ObjectArrayPriorityQueue buildPriorityQueue(int size) {
         }
 
         @Override
-        final void buildSubAggs(B[][] topBucketsPerOrd) throws IOException {
+        final void buildSubAggs(ObjectArray<B[]> topBucketsPerOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
         }
 
@@ -356,8 +360,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException {
         }
 
         @Override
-        LongTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new LongTerms.Bucket[size][];
+        ObjectArray<LongTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -397,7 +401,7 @@ LongTerms buildResult(long owningBucketOrd, long otherDocCount, LongTerms.Bucket
                 bucketCountThresholds.getShardSize(),
                 showTermDocCountError,
                 otherDocCount,
-                List.of(topBuckets),
+                Arrays.asList(topBuckets),
                 null
             );
         }
@@ -438,8 +442,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException {
         }
 
         @Override
-        DoubleTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new DoubleTerms.Bucket[size][];
+        ObjectArray<DoubleTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -479,7 +483,7 @@ DoubleTerms buildResult(long owningBucketOrd, long otherDocCount, DoubleTerms.Bu
                 bucketCountThresholds.getShardSize(),
                 showTermDocCountError,
                 otherDocCount,
-                List.of(topBuckets),
+                Arrays.asList(topBuckets),
                 null
            );
        }
@@ -551,8 +555,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
        }
 
         @Override
-        SignificantLongTerms.Bucket[][] buildTopBucketsPerOrd(int size) {
-            return new SignificantLongTerms.Bucket[size][];
+        ObjectArray<SignificantLongTerms.Bucket[]> buildTopBucketsPerOrd(long size) {
+            return bigArrays().newObjectArray(size);
         }
 
         @Override
@@ -583,7 +587,7 @@ ObjectArrayPriorityQueue buildPriorityQueue(int siz
         }
 
         @Override
-        void buildSubAggs(SignificantLongTerms.Bucket[][] topBucketsPerOrd) throws IOException {
+        void buildSubAggs(ObjectArray<SignificantLongTerms.Bucket[]> topBucketsPerOrd) throws IOException {
             buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs);
         }
 
@@ -601,7 +605,7 @@ SignificantLongTerms buildResult(long owningBucketOrd, long otherDocCoun, Signif
                 subsetSizes.get(owningBucketOrd),
                 supersetSize,
                 significanceHeuristic,
-                List.of(topBuckets)
+                Arrays.asList(topBuckets)
             );
         }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java
index 2bc2833f0ddce..7200c33c71f70 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java
@@ -12,6 +12,8 @@
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.util.BytesRefHash;
+import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.index.fielddata.FieldData;
@@ -119,72 +121,82 @@ private void collectValue(BytesRef val, int doc, long owningBucketOrd, LeafBucke
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         /*
          * Collect the list of buckets, populate the filter with terms
         * that are too frequent, and figure out how to merge sub-buckets.
         */
-        StringRareTerms.Bucket[][] rarestPerOrd = new StringRareTerms.Bucket[owningBucketOrds.length][];
-        SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length];
-        long keepCount = 0;
-        long[] mergeMap = new long[(int) bucketOrds.size()];
-        Arrays.fill(mergeMap, -1);
-        long offset = 0;
-        for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) {
-            try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) {
-                filters[owningOrdIdx] = newFilter();
-                List<StringRareTerms.Bucket> builtBuckets = new ArrayList<>();
-                BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]);
-                BytesRef scratch = new BytesRef();
-                while (collectedBuckets.next()) {
-                    collectedBuckets.readValue(scratch);
-                    long docCount = bucketDocCount(collectedBuckets.ord());
-                    // if the key is below threshold, reinsert into the new ords
-                    if (docCount <= maxDocCount) {
-                        StringRareTerms.Bucket bucket = new StringRareTerms.Bucket(BytesRef.deepCopyOf(scratch), docCount, null, format);
-                        bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch);
-                        mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd;
-                        builtBuckets.add(bucket);
-                        keepCount++;
-                    } else {
-                        filters[owningOrdIdx].add(scratch);
+        try (
+            ObjectArray<StringRareTerms.Bucket[]> rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size());
+            ObjectArray<SetBackedScalingCuckooFilter> filters = bigArrays().newObjectArray(owningBucketOrds.size())
+        ) {
+            try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) {
+                mergeMap.fill(0, mergeMap.size(), -1);
+                long keepCount = 0;
+                long offset = 0;
+                for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) {
+                    try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) {
+                        filters.set(owningOrdIdx, newFilter());
+                        List<StringRareTerms.Bucket> builtBuckets = new ArrayList<>();
+                        BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx));
+                        BytesRef scratch = new BytesRef();
+                        while (collectedBuckets.next()) {
+                            collectedBuckets.readValue(scratch);
+                            long docCount = bucketDocCount(collectedBuckets.ord());
+                            // if the key is below threshold, reinsert into the new ords
+                            if (docCount <= maxDocCount) {
+                                StringRareTerms.Bucket bucket = new StringRareTerms.Bucket(
+                                    BytesRef.deepCopyOf(scratch),
+                                    docCount,
+                                    null,
+                                    format
+                                );
+                                bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch);
+                                mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd);
+                                builtBuckets.add(bucket);
+                                keepCount++;
+                            } else {
+                                filters.get(owningOrdIdx).add(scratch);
+                            }
+                        }
+                        rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(StringRareTerms.Bucket[]::new));
+                        offset += bucketsInThisOwningBucketToCollect.size();
                     }
                 }
-                rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(StringRareTerms.Bucket[]::new);
-                offset += bucketsInThisOwningBucketToCollect.size();
-            }
-        }
-
-        /*
-         * Only merge/delete the ordinals if we have actually deleted one,
-         * to save on some redundant work.
- */ - if (keepCount != mergeMap.length) { - LongUnaryOperator howToMerge = b -> mergeMap[(int) b]; - rewriteBuckets(offset, howToMerge); - if (deferringCollector() != null) { - ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + /* + * Only merge/delete the ordinals if we have actually deleted one, + * to save on some redundant work. + */ + if (keepCount != mergeMap.size()) { + LongUnaryOperator howToMerge = mergeMap::get; + rewriteBuckets(offset, howToMerge); + if (deferringCollector() != null) { + ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + } + } } - } - /* - * Now build the results! - */ - buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator()); - result[ordIdx] = new StringRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(rarestPerOrd[ordIdx]), - maxDocCount, - filters[ordIdx] - ); + /* + * Now build the results! + */ + buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + StringRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); + Arrays.sort(buckets, ORDER.comparator()); + result[ordIdx] = new StringRareTerms( + name, + ORDER, + metadata(), + format, + Arrays.asList(buckets), + maxDocCount, + filters.get(ordIdx) + ); + } + return result; } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java index 8742136c86ec6..0d767e356108a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -36,10 +37,10 @@ protected MetricsAggregator(String name, AggregationContext context, Aggregator public abstract InternalAggregation buildAggregation(long owningBucketOrd) throws IOException; @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildAggregation(owningBucketOrds[ordIdx]); + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + results[ordIdx] = buildAggregation(owningBucketOrds.get(ordIdx)); } return results; } diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java index 
fff1990c29750..90e84acc7cad5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.profile.aggregation; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -68,7 +69,7 @@ public BucketComparator bucketComparator(String key, SortOrder order) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { Timer timer = profileBreakdown.getNewTimer(AggregationTimingType.BUILD_AGGREGATION); InternalAggregation[] result; timer.start(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java index 125b2d20cf9f3..6e9bb596e944b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; @@ -113,7 +114,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[] { null }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java index 8d3fe0f7f6e79..2d0622dbb6322 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -47,7 +48,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { throw new UnsupportedOperationException(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 9b6ea7272d0f9..e796cee92c0dc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.BucketCollector; @@ -77,7 +79,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -91,7 +93,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -141,7 +143,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); equalTo(Map.of(0L, List.of(0, 1, 2, 3, 4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9))); }); @@ -158,7 +160,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9)))); }); @@ -176,12 +178,20 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(0, 1, 2, 3), 1L, List.of(8), 2L, List.of(9)))); }); } + private LongArray toLongArray(long... 
lons) { + LongArray longArray = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(lons.length); + for (int i = 0; i < lons.length; i++) { + longArray.set(i, lons[i]); + } + return longArray; + } + private void testCase( BiFunction leafCollector, CheckedBiConsumer verify diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index 80f27b31ca65b..fb4c62ad66f19 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -72,7 +73,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[0]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 2df6a0cfb91ca..a0a24e98ae721 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -68,7 +69,7 @@ public void testReplay() throws Exception { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 85882a5c56851..0c6e94a15ec36 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -20,6 +20,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import 
org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; @@ -235,57 +237,62 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalMultiTerms.Bucket[][] topBucketsPerOrd = new InternalMultiTerms.Bucket[owningBucketOrds.length][]; - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try ( - ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - InternalMultiTerms.Bucket spare = null; - BytesRef spareKey = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); - spareKey = new BytesRef(); + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + try ( + ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + InternalMultiTerms.Bucket spare = null; + BytesRef spareKey = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); + spareKey = new BytesRef(); + } + ordsEnum.readValue(spareKey); + spare.terms = unpackTerms(spareKey); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); } - ordsEnum.readValue(spareKey); - spare.terms = unpackTerms(spareKey); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; - topBucketsPerOrd[ordIdx] = bucketsForOrd; - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd[ordIdx][b] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + // Get the top buckets + InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + for (int b = (int) ordered.size() - 1; b >= 0; --b) { + InternalMultiTerms.Bucket[] buckets = 
topBucketsPerOrd.get(ordIdx); + buckets[b] = ordered.pop(); + otherDocCounts.increment(ordIdx, -buckets[b].getDocCount()); + } } } - } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + result[ordIdx] = buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); + } + return result; } - return result; } InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] topBuckets) { @@ -305,7 +312,7 @@ InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] t bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), 0, formats, keyConverters, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index e55736cf43607..5b1ed7c954fe9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Releasables; @@ -110,31 +111,32 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException { - Bucket[][] topBucketsPerOrd = new Bucket[ordsToCollect.length][]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - final long ord = ordsToCollect[ordIdx]; - final TokenListCategorizer categorizer = (ord < categorizers.size()) ? categorizers.get(ord) : null; - if (categorizer == null) { - topBucketsPerOrd[ordIdx] = new Bucket[0]; - continue; + public InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(ordsToCollect.size())) { + for (long ordIdx = 0; ordIdx < ordsToCollect.size(); ordIdx++) { + final long ord = ordsToCollect.get(ordIdx); + final TokenListCategorizer categorizer = (ord < categorizers.size()) ? 
categorizers.get(ord) : null; + if (categorizer == null) { + topBucketsPerOrd.set(ordIdx, new Bucket[0]); + continue; + } + int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); + topBucketsPerOrd.set(ordIdx, categorizer.toOrderedBuckets(size)); } - int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); - topBucketsPerOrd[ordIdx] = categorizer.toOrderedBuckets(size); - } - buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); - InternalAggregation[] results = new InternalAggregation[ordsToCollect.length]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - results[ordIdx] = new InternalCategorizationAggregation( - name, - bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), - similarityThreshold, - metadata(), - Arrays.asList(topBucketsPerOrd[ordIdx]) - ); + buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(ordsToCollect.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + results[ordIdx] = new InternalCategorizationAggregation( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + similarityThreshold, + metadata(), + Arrays.asList(topBucketsPerOrd.get(ordIdx)) + ); + } + return results; } - return results; } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java index 350f45afb9e1f..1b28ebbb3eec6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java @@ -40,10 +40,12 @@ * At the time of writing circuit breakers are a global gauge.) * * After the map phase and before reduce, the {@link ItemSetMapReduceAggregator} creates instances of - * {@link InternalItemSetMapReduceAggregation}, see {@link ItemSetMapReduceAggregator#buildAggregations(long[])}. + * {@link InternalItemSetMapReduceAggregation}, see + * {@link ItemSetMapReduceAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. * * (Note 1: Instead of keeping the existing instance, it would have been possible to deep-copy the object like - * {@link CardinalityAggregator#buildAggregations(long[])}. I decided against this approach mainly because the deep-copy isn't + * {@link CardinalityAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. + * I decided against this approach mainly because the deep-copy isn't * secured by circuit breakers, meaning the node could run out of memory during the deep-copy.) 
* (Note 2: Between {@link ItemSetMapReduceAggregator#doClose()} and serializing {@link InternalItemSetMapReduceAggregation} * memory accounting is broken, meaning the agg context gets closed and bytes get returned to the circuit breaker before memory is diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 0f9555c77341f..1a5e5d7a0790e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; @@ -117,9 +118,9 @@ public InternalAggregation buildEmptyAggregation() { } @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { results[ordIdx] = buildAggregation(ordIdx); } From ea90fbc10d3371e5e3b9921a81a4936c2e39d5f4 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 19 Nov 2024 11:32:54 +0000 Subject: [PATCH 037/386] Skip FIPS JVMs in `testReloadCredentialsFromKeystore` (#116814) This test doesn't need to run in FIPS mode, and apparently it fails sometimes, so with this commit we skip it. 
Closes #116811 --- .../org/elasticsearch/repositories/s3/RepositoryS3RestIT.java | 3 ++- muted-tests.yml | 3 --- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java index ead2cb36ad150..dcd29c6d26c6e 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java @@ -51,8 +51,9 @@ protected String getTestRestCluster() { return cluster.getHttpAddresses(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/116811") public void testReloadCredentialsFromKeystore() throws IOException { + assumeFalse("doesn't work in a FIPS JVM, but that's ok", inFipsJvm()); + // Register repository (?verify=false because we don't have access to the blob store yet) final var repositoryName = randomIdentifier(); registerRepository( diff --git a/muted-tests.yml b/muted-tests.yml index 2b3c2a64d5ab5..7083767d0451e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -217,9 +217,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} issue: https://github.com/elastic/elasticsearch/issues/116777 -- class: org.elasticsearch.repositories.s3.RepositoryS3RestIT - method: testReloadCredentialsFromKeystore - issue: https://github.com/elastic/elasticsearch/issues/116811 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 - class: org.elasticsearch.xpack.esql.analysis.VerifierTests From d33bff6468c26e7a15a0f3c30d3c1383bb72b947 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Tue, 19 Nov 2024 07:48:35 -0500 Subject: [PATCH 038/386] [ES|QL][DOCS] Add docs for date_period and time_duration (#116368) * add docs for date_period and time_duration --- docs/reference/esql/esql-language.asciidoc | 2 + docs/reference/esql/esql-syntax.asciidoc | 18 +-- docs/reference/esql/functions/binary.asciidoc | 2 + docs/reference/esql/implicit-casting.asciidoc | 40 ++++--- docs/reference/esql/time-spans.asciidoc | 111 ++++++++++++++++++ 5 files changed, 143 insertions(+), 30 deletions(-) create mode 100644 docs/reference/esql/time-spans.asciidoc diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index a7c0e5e01a867..151ca803bf2eb 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -14,6 +14,7 @@ Detailed reference documentation for the {esql} language: * <> * <> * <> +* <> include::esql-syntax.asciidoc[] include::esql-commands.asciidoc[] @@ -23,3 +24,4 @@ include::multivalued-fields.asciidoc[] include::esql-process-data-with-dissect-grok.asciidoc[] include::esql-enrich-data.asciidoc[] include::implicit-casting.asciidoc[] +include::time-spans.asciidoc[] diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index c7f741d064310..ba1c4ca820381 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -157,21 +157,15 @@ FROM employees ==== Timespan literals Datetime intervals and timespans can be 
expressed using timespan literals. -Timespan literals are a combination of a number and a qualifier. These -qualifiers are supported: - -* `millisecond`/`milliseconds`/`ms` -* `second`/`seconds`/`sec`/`s` -* `minute`/`minutes`/`min` -* `hour`/`hours`/`h` -* `day`/`days`/`d` -* `week`/`weeks`/`w` -* `month`/`months`/`mo` -* `quarter`/`quarters`/`q` -* `year`/`years`/`yr`/`y` +Timespan literals are a combination of a number and a temporal unit. The +supported temporal units are listed in <>. +More examples of the usages of time spans can be found in +<>. + Timespan literals are not whitespace sensitive. These expressions are all valid: * `1day` * `1 day` * `1 day` + diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 72d466ae83d11..59bdadecc4923 100644 --- a/docs/reference/esql/functions/binary.asciidoc +++ b/docs/reference/esql/functions/binary.asciidoc @@ -87,6 +87,7 @@ Supported types: include::types/greater_than_or_equal.asciidoc[] +[[esql-add]] ==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline] @@ -98,6 +99,7 @@ Supported types: include::types/add.asciidoc[] +[[esql-subtract]] ==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline] diff --git a/docs/reference/esql/implicit-casting.asciidoc b/docs/reference/esql/implicit-casting.asciidoc index ffb6d3fc35acb..b24be0b645472 100644 --- a/docs/reference/esql/implicit-casting.asciidoc +++ b/docs/reference/esql/implicit-casting.asciidoc @@ -5,7 +5,7 @@ Implicit casting ++++ -Often users will input `date`, `ip`, `version`, `date_period` or `time_duration` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. +Often users will input `date`, `date_period`, `time_duration`, `ip` or `version` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. Without implicit casting users must explicitly code these `to_X` functions in their queries, when string literals don't match the target data types they are assigned or compared to. Here is an example of using `to_datetime` to explicitly perform a data type conversion. @@ -18,7 +18,10 @@ FROM employees | LIMIT 1 ---- -Implicit casting improves usability, by automatically converting string literals to the target data type. This is most useful when the target data type is `date`, `ip`, `version`, `date_period` or `time_duration`. It is natural to specify these as a string in queries. +[discrete] +[[esql-implicit-casting-example]] +==== Implicit casting example +Implicit casting automatically converts string literals to the target data type. This allows users to specify string values for types like `date`, `date_period`, `time_duration`, `ip` and `version` in their queries. The first query can be coded without calling the `to_datetime` function, as follows: @@ -31,35 +34,36 @@ FROM employees | LIMIT 1 ---- -[float] -=== Implicit casting support +[discrete] +[[esql-implicit-casting-supported-operations]] +==== Operations that support implicit casting The following table details which {esql} operations support implicit casting for different data types. 
[%header.monospaced.styled,format=dsv,separator=|] |=== -||ScalarFunction*|Operator*|<>|<> -|DATE|Y|Y|Y|N -|IP|Y|Y|Y|N -|VERSION|Y|Y|Y|N -|BOOLEAN|Y|Y|Y|N -|DATE_PERIOD/TIME_DURATION|Y|N|Y|N +|ScalarFunctions|Operators|<>|<> +DATE|Y|Y|Y|N +DATE_PERIOD/TIME_DURATION|Y|N|Y|N +IP|Y|Y|Y|N +VERSION|Y|Y|Y|N +BOOLEAN|Y|Y|Y|N |=== -ScalarFunction* includes: +ScalarFunctions includes: -<> +* <> -<> +* <> -<> +* <> -Operator* includes: +Operators includes: -<> +* <> -<> +* <> -<> +* <> diff --git a/docs/reference/esql/time-spans.asciidoc b/docs/reference/esql/time-spans.asciidoc new file mode 100644 index 0000000000000..d2aa0c4fa252e --- /dev/null +++ b/docs/reference/esql/time-spans.asciidoc @@ -0,0 +1,111 @@ +[[esql-time-spans]] +=== {esql} time spans + +++++ +Time spans +++++ + +Time spans represent intervals between two datetime values. There are currently two supported types of time spans: + +* `DATE_PERIOD` specifies intervals in years, quarters, months, weeks and days +* `TIME_DURATION` specifies intervals in hours, minutes, seconds and milliseconds + +A time span requires two elements: an integer value and a temporal unit. + +Time spans work with grouping functions such as <>, scalar functions such as <> and arithmetic operators such as <> and <>. Convert strings to time spans using <>, <>, or the cast operators `::DATE_PERIOD`, `::TIME_DURATION`. + +[discrete] +[[esql-time-spans-examples]] +==== Examples of using time spans in {esql} + + +With `BUCKET`: +[source.merge.styled,esql] +---- +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan-result] +|=== + + +With `DATE_TRUNC`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] +|=== + + +With `+` and/or `-`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + + +When a time span is provided as a named parameter in string format, `TO_DATEPERIOD`, `::DATE_PERIOD`, `TO_TIMEDURATION` or `::TIME_DURATION` can be used to convert to its corresponding time span value for arithmetic operations like `+` and/or `-`. +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | EVAL x = hire_date + ?timespan::DATE_PERIOD, y = hire_date - TO_DATEPERIOD(?timespan) + """, + "params": [{"timespan" : "1 day"}] +} +---- + +When a time span is provided as a named parameter in string format, it can be automatically converted to its corresponding time span value in grouping functions and scalar functions, like `BUCKET` and `DATE_TRUNC`. 
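+The two queries below pass the unit as a named parameter; for comparison, here
+is a minimal sketch of the same weekly bucketing written with an inline
+timespan literal (it assumes the same `employees` index used by the other
+examples on this page):
+
+[source, esql]
+----
+FROM employees
+| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
+| STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, 1 week)
+| SORT week
+----
+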
+[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" + | STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, ?timespan) + | SORT week + """, + "params": [{"timespan" : "1 week"}] +} +---- + +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | KEEP first_name, last_name, hire_date + | EVAL year_hired = DATE_TRUNC(?timespan, hire_date) + """, + "params": [{"timespan" : "1 year"}] +} +---- + +[discrete] +[[esql-time-spans-table]] +==== Supported temporal units +[%header.monospaced.styled,format=dsv,separator=|] +|=== +Temporal Units|Valid Abbreviations +year|y, yr, years +quarter|q, quarters +month|mo, months +week|w, weeks +day|d, days +hour|h, hours +minute|min, minutes +second|s, sec, seconds +millisecond|ms, milliseconds +|=== From 0db51d532669eb24989a2d368792cdda63f0a90e Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 19 Nov 2024 12:55:55 +0000 Subject: [PATCH 039/386] Remove disabling field names test (#116937) The test is conditional on the old version being pre-v8, which is never true on main now --- .../test/rest/RestTestLegacyFeatures.java | 3 - .../xpack/restart/FullClusterRestartIT.java | 79 ------------------- 2 files changed, 82 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index a10394b4156d6..5a228e2540007 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -78,8 +78,6 @@ public class RestTestLegacyFeatures implements FeatureSpecification { @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) public static final NodeFeature NEW_DATA_STREAMS_INDEX_NAME_FORMAT = new NodeFeature("data_stream.new_index_name_format"); @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DISABLE_FIELD_NAMES_FIELD_REMOVED = new NodeFeature("disable_of_field_names_field_removed"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); // YAML @@ -113,7 +111,6 @@ public Map getHistoricalFeatures() { entry(SLM_SUPPORTED, Version.V_7_4_0), entry(DATA_STREAMS_SUPPORTED, Version.V_7_9_0), entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), - entry(DISABLE_FIELD_NAMES_FIELD_REMOVED, Version.V_8_0_0), entry(ML_NLP_SUPPORTED, Version.V_8_0_0) ); } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index a56ddaabe8280..3cdd968fcc2e7 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestLegacyFeatures; @@ -53,8 +52,6 @@ import java.util.stream.Collectors; 
import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.upgrades.FullClusterRestartIT.assertNumHits; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -965,82 +962,6 @@ public void testDataStreams() throws Exception { assertNumHits("ds", 1, 1); } - /** - * Tests that a single document survives. Super basic smoke test. - */ - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // Can be removed - public void testDisableFieldNameField() throws IOException { - assumeFalse( - "can only disable field names field before 8.0", - oldClusterHasFeature(RestTestLegacyFeatures.DISABLE_FIELD_NAMES_FIELD_REMOVED) - ); - - String docLocation = "/nofnf/_doc/1"; - String doc = """ - { - "dv": "test", - "no_dv": "test" - }"""; - - if (isRunningAgainstOldCluster()) { - Request createIndex = new Request("PUT", "/nofnf"); - createIndex.setJsonEntity(""" - { - "settings": { - "index": { - "number_of_replicas": 1 - } - }, - "mappings": { - "_field_names": { "enabled": false }, - "properties": { - "dv": { "type": "keyword" }, - "no_dv": { "type": "keyword", "doc_values": false } - } - } - }"""); - createIndex.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(fieldNamesFieldOk())); - client().performRequest(createIndex); - - Request createDoc = new Request("PUT", docLocation); - createDoc.addParameter("refresh", "true"); - createDoc.setJsonEntity(doc); - client().performRequest(createDoc); - } - - Request getRequest = new Request("GET", docLocation); - assertThat(toStr(client().performRequest(getRequest)), containsString(doc)); - - if (isRunningAgainstOldCluster() == false) { - Request esql = new Request("POST", "_query"); - esql.setJsonEntity(""" - { - "query": "FROM nofnf | LIMIT 1" - }"""); - // {"columns":[{"name":"dv","type":"keyword"},{"name":"no_dv","type":"keyword"}],"values":[["test",null]]} - try { - Map result = entityAsMap(client().performRequest(esql)); - MapMatcher mapMatcher = matchesMap(); - if (result.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) result.get("took")).intValue()); - } - assertMap( - result, - mapMatcher.entry( - "columns", - List.of(Map.of("name", "dv", "type", "keyword"), Map.of("name", "no_dv", "type", "keyword")) - ).entry("values", List.of(List.of("test", "test"))) - ); - } catch (ResponseException e) { - logger.error( - "failed to query index without field name field. Existing indices:\n{}", - EntityUtils.toString(client().performRequest(new Request("GET", "_cat/indices")).getEntity()) - ); - throw e; - } - } - } - /** * Ignore the warning about the {@code _field_names} field. We intentionally * turn that field off sometimes. 
And other times old versions spuriously From b30a4b23f2f6d6dd8505656ff6fe71e3fc9d75d7 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 19 Nov 2024 13:35:04 +0000 Subject: [PATCH 040/386] Output a consistent format when generating error json (#90529) Now, error fields will always have 'type' and 'reason' fields, and the information in those fields is the same regardless of whether the output is detailed or not --- docs/changelog/90529.yaml | 26 +++ docs/reference/modules/http.asciidoc | 8 +- .../elasticsearch/ElasticsearchException.java | 54 +++-- .../common/xcontent/XContentHelper.java | 18 +- .../org/elasticsearch/rest/RestResponse.java | 7 +- .../ElasticsearchExceptionTests.java | 207 +++++++++++++++--- .../synonyms/PutSynonymRuleActionTests.java | 2 +- .../synonyms/PutSynonymsActionTests.java | 2 +- .../AbstractHttpServerTransportTests.java | 8 +- .../rest/BaseRestHandlerTests.java | 18 +- .../ChunkedRestResponseBodyPartTests.java | 2 +- .../rest/RestControllerTests.java | 82 +++---- .../rest/RestHttpResponseHeadersTests.java | 2 +- .../elasticsearch/rest/RestResponseTests.java | 80 ++----- .../rest/action/RestBuilderListenerTests.java | 6 +- .../rest/action/cat/RestTasksActionTests.java | 2 +- .../action/document/RestBulkActionTests.java | 2 +- .../action/search/RestSearchActionTests.java | 2 +- .../scroll/RestClearScrollActionTests.java | 2 +- .../scroll/RestSearchScrollActionTests.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 18 +- .../EnterpriseSearchBaseRestHandlerTests.java | 2 +- .../action/SecurityBaseRestHandlerTests.java | 2 +- .../apikey/ApiKeyBaseRestHandlerTests.java | 2 +- .../apikey/RestCreateApiKeyActionTests.java | 2 +- ...stCreateCrossClusterApiKeyActionTests.java | 2 +- .../apikey/RestGetApiKeyActionTests.java | 6 +- .../RestInvalidateApiKeyActionTests.java | 4 +- .../apikey/RestQueryApiKeyActionTests.java | 8 +- ...stUpdateCrossClusterApiKeyActionTests.java | 2 +- .../oauth2/RestGetTokenActionTests.java | 6 +- .../action/user/RestQueryUserActionTests.java | 4 +- 32 files changed, 389 insertions(+), 201 deletions(-) create mode 100644 docs/changelog/90529.yaml diff --git a/docs/changelog/90529.yaml b/docs/changelog/90529.yaml new file mode 100644 index 0000000000000..a014c82259a9e --- /dev/null +++ b/docs/changelog/90529.yaml @@ -0,0 +1,26 @@ +pr: 90529 +summary: Output a consistent format when generating error json +area: Infra/REST API +type: "breaking" +issues: + - 89387 +breaking: + title: Error JSON structure has changed when detailed errors are disabled + area: REST API + details: |- + This change modifies the JSON format of error messages returned to REST clients + when detailed messages are turned off. + Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set, + just consisted of a single `"error"` text field with some basic information. + Setting `http.detailed_errors.enabled: true` (the default) changed this field + to an object with more detailed information. + With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always + be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed + errors are enabled. + To use the previous structure for non-detailed errors, use the v8 REST API. + impact: |- + If you have set `http.detailed_errors.enabled: false` (the default is `true`) + the structure of JSON when any exceptions occur now matches the structure when + detailed errors are enabled. 
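+    As an illustrative sketch, a non-detailed response body that previously
+    looked like `{"error":"ElasticsearchException[foo]"}` is now rendered as
+    `{"error":{"type":"exception","reason":"foo"}}`; the exact `type` and
+    `reason` values depend on the exception thrown.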
+ To use the previous structure for non-detailed errors, use the v8 REST API. + notable: false diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc index 984fb0d5bf1c1..17d67e00e2ebb 100644 --- a/docs/reference/modules/http.asciidoc +++ b/docs/reference/modules/http.asciidoc @@ -145,11 +145,9 @@ NOTE: This header is only returned when the setting is set to `true`. `http.detailed_errors.enabled`:: (<>, boolean) -Configures whether detailed error reporting in HTTP responses is enabled. -Defaults to `true`, which means that HTTP requests that include the -<> will return a -detailed error message including a stack trace if they encounter an exception. -If set to `false`, requests with the `?error_trace` parameter are rejected. +Configures whether detailed error reporting in HTTP responses is enabled. Defaults to `true`. +When this option is set to `false`, only basic information is returned if an error occurs in the request, +and requests with <> set are rejected. `http.pipelining.max_events`:: (<>, integer) diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 32198ba7584be..3c5c365654206 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DocumentParsingException; @@ -611,23 +613,31 @@ protected static void generateThrowableXContent(XContentBuilder builder, Params */ public static XContentBuilder generateFailureXContent(XContentBuilder builder, Params params, @Nullable Exception e, boolean detailed) throws IOException { - // No exception to render as an error + if (builder.getRestApiVersion() == RestApiVersion.V_8) { + if (e == null) { + return builder.field(ERROR, "unknown"); + } + if (detailed == false) { + return generateNonDetailedFailureXContentV8(builder, e); + } + // else fallthrough + } + if (e == null) { - return builder.field(ERROR, "unknown"); + // No exception to render as an error + builder.startObject(ERROR); + builder.field(TYPE, "unknown"); + builder.field(REASON, "unknown"); + return builder.endObject(); } - // Render the exception with a simple message if (detailed == false) { - String message = "No ElasticsearchException found"; - Throwable t = e; - for (int counter = 0; counter < 10 && t != null; counter++) { - if (t instanceof ElasticsearchException) { - message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]"; - break; - } - t = t.getCause(); - } - return builder.field(ERROR, message); + // just render the type & message + Throwable t = ExceptionsHelper.unwrapCause(e); + builder.startObject(ERROR); + builder.field(TYPE, getExceptionName(t)); + builder.field(REASON, t.getMessage()); + return builder.endObject(); } // Render the exception with all details @@ -646,6 +656,20 @@ public static XContentBuilder generateFailureXContent(XContentBuilder builder, P return builder.endObject(); } + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove V8 API + private 
static XContentBuilder generateNonDetailedFailureXContentV8(XContentBuilder builder, @Nullable Exception e) throws IOException {
+        String message = "No ElasticsearchException found";
+        Throwable t = e;
+        for (int counter = 0; counter < 10 && t != null; counter++) {
+            if (t instanceof ElasticsearchException) {
+                message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]";
+                break;
+            }
+            t = t.getCause();
+        }
+        return builder.field(ERROR, message);
+    }
+
     /**
      * Parses the output of {@link #generateFailureXContent(XContentBuilder, Params, Exception, boolean)}
      */
@@ -729,8 +753,8 @@ public static String getExceptionName(Throwable ex) {
 
     static String buildMessage(String type, String reason, String stack) {
         StringBuilder message = new StringBuilder("Elasticsearch exception [");
-        message.append(TYPE).append('=').append(type).append(", ");
-        message.append(REASON).append('=').append(reason);
+        message.append(TYPE).append('=').append(type);
+        message.append(", ").append(REASON).append('=').append(reason);
         if (stack != null) {
             message.append(", ").append(STACK_TRACE).append('=').append(stack);
         }
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
index 9cfa22d0a3cfb..9464ccbcc7aa3 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.plugins.internal.XContentParserDecorator;
 import org.elasticsearch.xcontent.DeprecationHandler;
@@ -626,7 +627,22 @@ public static BytesReference toXContent(ChunkedToXContent toXContent, XContentTy
      */
     public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params, boolean humanReadable)
         throws IOException {
-        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+        return toXContent(toXContent, xContentType, RestApiVersion.current(), params, humanReadable);
+    }
+
+    /**
+     * Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided
+     * {@link XContentType}. Wraps the output into a new anonymous object according to the value returned
+     * by the {@link ToXContent#isFragment()} method. 
+ */ + public static BytesReference toXContent( + ToXContent toXContent, + XContentType xContentType, + RestApiVersion restApiVersion, + Params params, + boolean humanReadable + ) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent(), restApiVersion)) { builder.humanReadable(humanReadable); if (toXContent.isFragment()) { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index 29cae343fb09e..d043974055667 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -146,12 +147,12 @@ public RestResponse(RestChannel channel, RestStatus status, Exception e) throws params = new ToXContent.DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false"), params); } - if (channel.detailedErrorsEnabled() == false) { + if (channel.request().getRestApiVersion() == RestApiVersion.V_8 && channel.detailedErrorsEnabled() == false) { deprecationLogger.warn( DeprecationCategory.API, "http_detailed_errors", - "The JSON format of non-detailed errors will change in Elasticsearch 9.0 to match the JSON structure" - + " used for detailed errors. To keep using the existing format, use the V8 REST API." + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure" + + " used for detailed errors." ); } diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 9863d2156422d..f5a23cf68a26e 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -509,12 +510,12 @@ public void testGetDetailedMessage() { public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("test"); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ {"type":"exception","reason":"test"}"""); } { ElasticsearchException e = new IndexShardRecoveringException(new ShardId("_test", "_0", 5)); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "index_shard_recovering_exception", "reason": "CurrentState[RECOVERING] Already recovering", @@ -529,7 +530,7 @@ public void testToXContent() throws IOException { "foo", new IllegalStateException("bar") ); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "illegal_state_exception", "reason": "bar" @@ -537,7 +538,7 @@ public void testToXContent() throws IOException { } { ElasticsearchException e = new ElasticsearchException(new IllegalArgumentException("foo")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": 
"java.lang.IllegalArgumentException: foo", @@ -552,7 +553,7 @@ public void testToXContent() throws IOException { "foo", new ElasticsearchException("bar", new IllegalArgumentException("index is closed", new RuntimeException("foobar"))) ); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ { "type": "exception", "reason": "foo", @@ -573,7 +574,7 @@ public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("foo", new IllegalStateException("bar")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": "foo", @@ -602,21 +603,91 @@ public void testToXContent() throws IOException { } } + public void testGenerateFailureToXContentWithNoDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found"}}""", false); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar"}}""", false); + } + + { // header and metadata shouldn't be rendered + ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expected = """ + {"error":{"type": "parsing_exception","reason": "foobar"}}"""; + assertFailureAsJson(ex, expected, false); + } + } + + public void testGenerateFailureToXContentWithDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found", + "root_cause":[{"type":"file_not_found_exception","reason":"foo not found"}]}}""", true); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar","line":1,"col":2, + "root_cause":[{"type":"parsing_exception","reason":"foobar","line":1,"col":2}]}}""", true); + } + + { // render header and metadata + ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expectedFragment = """ + { + "type": "parsing_exception", + "reason": "foobar", + "line": 1, + "col": 2, + "test1": "value1", + "test2": "value2", + "header": { + "test_multi": [ + "some value", + "another value" + ], + "test": "some value" + } + """; + String expected = "{\"error\":" + expectedFragment + ",\"root_cause\":[" + expectedFragment + "}]}}"; + assertFailureAsJson(ex, expected, true); + } + } + public void testGenerateThrowableToXContent() throws IOException { { - Exception ex; - if (randomBoolean()) { - // just a wrapper which is omitted - ex = new RemoteTransportException("foobar", new FileNotFoundException("foo not found")); - } else { - ex = new FileNotFoundException("foo not found"); + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); } - 
assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"file_not_found_exception","reason":"foo not found"}"""); } { ParsingException ex = new ParsingException(1, 2, "foobar", null); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"parsing_exception","reason":"foobar","line":1,"col":2}"""); } @@ -656,7 +727,7 @@ public void testGenerateThrowableToXContent() throws IOException { "test": "some value" } }"""; - assertExceptionAsJson(ex, expected); + assertThrowableAsJson(ex, expected); } } @@ -697,7 +768,7 @@ public void testToXContentWithHeadersAndMetadata() throws IOException { } }"""; - assertExceptionAsJson(e, expectedJson); + assertThrowableAsJson(e, expectedJson); ElasticsearchException parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), expectedJson)) { @@ -859,7 +930,7 @@ public void testFromXContentWithHeadersAndMetadata() throws IOException { } assertNotNull(parsed); - assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]"); + assertEquals("Elasticsearch exception [type=exception, reason=foo]", parsed.getMessage()); assertThat(parsed.getHeaderKeys(), hasSize(1)); assertThat(parsed.getHeader("foo_1"), hasItem("foo1")); assertThat(parsed.getMetadataKeys(), hasSize(1)); @@ -996,11 +1067,40 @@ public void testThrowableToAndFromXContent() throws IOException { public void testUnknownFailureToAndFromXContent() throws IOException { final XContent xContent = randomFrom(XContentType.values()).xContent(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - // Prints a null failure using generateFailureXContent() - ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + + // Failure was null, expecting a "unknown" reason + assertEquals("Elasticsearch exception [type=unknown, reason=unknown]", parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + } + + public void testUnknownFailureToAndFromXContentV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); ElasticsearchException parsedFailure; try (XContentParser parser = createParser(xContent, failureBytes)) { @@ -1021,10 +1121,46 @@ public void testFailureToAndFromXContentWithNoDetails() throws IOException { final XContent xContent = 
randomFrom(XContentType.values()).xContent(); final Exception failure = (Exception) randomExceptions().v1(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, failure, false); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + try (XContentParser parser = createParser(xContent, failureBytes)) { + failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); + } + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + assertNotNull(parsedFailure); + + String type = ElasticsearchException.getExceptionName(failure); + String reason = failure.getMessage(); + assertEquals(ElasticsearchException.buildMessage(type, reason, null), parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + assertNull(parsedFailure.getCause()); + } + + public void testFailureToAndFromXContentWithNoDetailsV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + final Exception failure = (Exception) randomExceptions().v1(); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1165,10 +1301,12 @@ public void testFailureToAndFromXContentWithDetails() throws IOException { } Exception finalFailure = failure; - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1197,13 +1335,20 @@ private static void assertToXContentAsJson(ToXContent e, String expectedJson) th assertToXContentEquivalent(new BytesArray(expectedJson), actual, XContentType.JSON); } - private static void assertExceptionAsJson(Exception e, String expectedJson) throws IOException { + private static void assertThrowableAsJson(Throwable e, String expectedJson) throws IOException { assertToXContentAsJson((builder, params) -> { ElasticsearchException.generateThrowableXContent(builder, params, e); return builder; }, expectedJson); } + private static void assertFailureAsJson(Exception e, String expectedJson, 
boolean detailed) throws IOException { + assertToXContentAsJson( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, e, detailed), + expectedJson + ); + } + public static void assertDeepEquals(ElasticsearchException expected, ElasticsearchException actual) { do { if (expected == null) { diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java index a1b9c59571496..303b75098ab67 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "testSet", "synonymRuleId", "testRule")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java index 4dce73fcf0e89..915c338195c86 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "test")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 19d92568e6528..fa774c0bcfd12 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -271,7 +271,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th final RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( fakeRequest, - true, + randomBoolean(), RestStatus.BAD_REQUEST ); @@ -361,7 +361,11 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th Map> restHeaders = new HashMap<>(); restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( + fakeRequest, + randomBoolean(), + RestStatus.BAD_REQUEST + ); try ( AbstractHttpServerTransport 
transport = new AbstractHttpServerTransport( diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 8a8bed9ca73db..9f82911ed121f 100644 --- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -73,7 +73,7 @@ public List routes() { params.put("consumed", randomAlphaOfLength(8)); params.put("unconsumed", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -108,7 +108,7 @@ public List routes() { params.put("unconsumed-first", randomAlphaOfLength(8)); params.put("unconsumed-second", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -155,7 +155,7 @@ public List routes() { params.put("very_close_to_parametre", randomAlphaOfLength(8)); params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -206,7 +206,7 @@ public List routes() { params.put("consumed", randomAlphaOfLength(8)); params.put("response_param", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -238,7 +238,7 @@ public List routes() { params.put("human", null); params.put("error_trace", randomFrom("true", "false", null)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -283,7 +283,7 @@ public List routes() { params.put("size", randomAlphaOfLength(8)); params.put("time", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -314,7 +314,7 @@ public List routes() 
{ new BytesArray(builder.toString()), XContentType.JSON ).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -341,7 +341,7 @@ public List routes() { }; final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -371,7 +371,7 @@ public List routes() { new BytesArray(builder.toString()), XContentType.JSON ).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) diff --git a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java index 907c16aad5fdc..eece90ed94cf9 100644 --- a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java +++ b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java @@ -56,7 +56,7 @@ public void testEncodesChunkedXContentCorrectly() throws IOException { ToXContent.EMPTY_PARAMS, new FakeRestChannel( new FakeRestRequest.Builder(xContentRegistry()).withContent(BytesArray.EMPTY, randomXContent.type()).build(), - true, + randomBoolean(), 1 ) ); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index afdad1045b4de..b7d38f6f299c7 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -161,7 +161,7 @@ public void testApplyProductSpecificResponseHeaders() { final ThreadContext threadContext = client.threadPool().getThreadContext(); final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the // context in this test @@ -180,7 +180,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "bar")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); 
assertTrue(channel.getSendResponseCalled()); } @@ -211,7 +211,7 @@ public String getName() { }); } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy( eq(1L), @@ -235,7 +235,7 @@ public MethodHandlers next() { return null; } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -257,7 +257,7 @@ public MethodHandlers next() { } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -280,7 +280,7 @@ public String getName() { })); when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenAnswer(x -> handlers.iterator()); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.METHOD_NOT_ALLOWED); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 405))); } @@ -290,7 +290,7 @@ public void testDispatchBadRequestEmitsMetric() { final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, threadContext, new Exception()); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -314,7 +314,7 @@ public MethodHandlers next() { return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> {}); } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); verify(tracer).startTrace( eq(threadContext), @@ -340,7 +340,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { new RestResponse(RestStatus.OK, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY) ) ); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRequest, channel, threadContext); assertTrue(channel.getSendResponseCalled()); } @@ -466,7 +466,7 @@ public void testRestInterceptor() throws Exception { ); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", 
XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); httpServerTransport.start(); assertThat(wrapperCalled.get(), is(true)); @@ -477,7 +477,7 @@ public void testDispatchRequestAddsAndFreesBytesOnSuccess() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -489,7 +489,7 @@ public void testDispatchRequestAddsAndFreesBytesOnError() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/error", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -502,7 +502,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnError() { String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/error", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true); + ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, randomBoolean()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -521,7 +521,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnErrorDuringSend() { ); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/foo", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true) { + ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, randomBoolean()) { @Override protected BytesStream newBytesOutput() { return new RecyclerBytesStreamOutput(recycler); @@ -538,7 +538,7 @@ public void testDispatchRequestLimitsBytes() { int contentLength = BREAKER_LIMIT.bytesAsInt() + 1; String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.TOO_MANY_REQUESTS); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.TOO_MANY_REQUESTS); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -549,7 +549,7 @@ public void testDispatchRequestLimitsBytes() { public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / 
inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); restController.registerHandler( new Route(GET, "/"), @@ -566,7 +566,7 @@ public void testDispatchDoesNotRequireContentTypeForRequestsWithoutContent() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -582,7 +582,7 @@ public void testDispatchFailsWithPlainText() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler( new Route(GET, "/foo"), (request, channel1, client) -> channel1.sendResponse( @@ -603,7 +603,7 @@ public void testDispatchUnsupportedContentType() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -620,7 +620,7 @@ public void testDispatchWorksWithNewlineDelimitedJson() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -659,7 +659,7 @@ public void testDispatchWithContentStream() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -683,7 +683,7 @@ public void testDispatchWithContentStreamNoContentType() { RestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(new BytesArray("{}"), null) .withPath("/foo") .build(); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } @@ -712,7 +712,7 @@ public void 
testNonStreamingXContentCausesErrorResponse() throws IOException { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -737,7 +737,7 @@ public void testUnknownContentWithContentStream() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -756,7 +756,7 @@ public boolean supportsBulkContent() { public void testDispatchBadRequest() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest( channel, client.threadPool().getThreadContext(), @@ -789,7 +789,7 @@ public boolean canTripCircuitBreaker() { .withContent(BytesReference.bytes(content), content.contentType()) .build(); - final AssertingChannel channel = new AssertingChannel(restRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(restRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); assertFalse(restRequest.isContentConsumed()); @@ -801,7 +801,7 @@ public boolean canTripCircuitBreaker() { public void testDispatchBadRequestUnknownCause() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), null); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("unknown cause")); @@ -813,14 +813,14 @@ public void testDispatchBadRequestWithValidationException() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); // it's always a 400 bad request when dispatching "regular" {@code ElasticsearchException} - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), exception); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("bad bad exception")); // but {@code HttpHeadersValidationException} do carry over the rest response 
code - channel = new AssertingChannel(fakeRestRequest, true, status); + channel = new AssertingChannel(fakeRestRequest, randomBoolean(), status); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), new HttpHeadersValidationException(exception)); assertTrue(channel.getSendResponseCalled()); @@ -831,7 +831,7 @@ public void testFavicon() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(GET) .withPath("/favicon.ico") .build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().contentType(), containsString("image/x-icon")); @@ -841,7 +841,7 @@ public void testFaviconWithWrongHttpMethod() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod( randomValueOtherThanMany(m -> m == GET || m == OPTIONS, () -> randomFrom(RestRequest.Method.values())) ).withPath("/favicon.ico").build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().getHeaders().containsKey("Allow"), equalTo(true)); @@ -917,7 +917,7 @@ public Exception getInboundException() { } }, null); - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); @@ -937,7 +937,7 @@ public Method method() { } }; - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); verify(tracer).startTrace(any(), any(RestRequest.class), anyString(), anyMap()); verify(tracer).addError(any(RestRequest.class), any(IllegalArgumentException.class)); @@ -951,7 +951,7 @@ public void testDispatchCompatibleHandler() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a compatible handler restController.registerHandler(GET, "/foo", RestApiVersion.minimumSupported(), (request, channel1, client) -> { @@ -975,7 +975,7 @@ public void testDispatchCompatibleRequestToNewlyAddedHandler() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new 
AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a CURRENT newly added handler restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> { @@ -1018,7 +1018,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a CURRENT newly added handler restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> { @@ -1041,7 +1041,7 @@ public void testCustomMediaTypeValidation() { final String mediaType = "application/x-protobuf"; FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // register handler that handles custom media type validation restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @@ -1068,7 +1068,7 @@ public void testBrowserSafelistedContentTypesAreRejected() { final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override @@ -1115,7 +1115,7 @@ public void testApiProtectionWithServerlessDisabled() { List accessiblePaths = List.of("/public", "/internal", "/hidden"); accessiblePaths.forEach(path -> { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); }); } @@ -1137,12 +1137,12 @@ public void testApiProtectionWithServerlessEnabledAsEndUser() { final Consumer> checkUnprotected = paths -> paths.forEach(path -> { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); }); final Consumer> checkProtected = paths -> paths.forEach(path -> { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.GONE); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.GONE); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); RestResponse restResponse = channel.getRestResponse(); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java 
b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index 4345f3c5e3fb4..7fe2388ec5113 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -97,7 +97,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception { RestRequest restRequest = fakeRestRequestBuilder.build(); // Send the request and verify the response status code - FakeRestChannel restChannel = new FakeRestChannel(restRequest, true, 1); + FakeRestChannel restChannel = new FakeRestChannel(restRequest, randomBoolean(), 1); restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY)); assertThat(restChannel.capturedResponse().status().getStatus(), is(405)); diff --git a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java index cfed83f352951..b85ad31288c8c 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java @@ -93,7 +93,6 @@ public void testWithHeaders() throws Exception { assertThat(response.getHeaders().get("n1"), contains("v11", "v12")); assertThat(response.getHeaders().get("n2"), notNullValue()); assertThat(response.getHeaders().get("n2"), contains("v21", "v22")); - assertChannelWarnings(channel); } public void testEmptyChunkedBody() { @@ -114,11 +113,11 @@ public void testSimpleExceptionMessage() throws Exception { Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); RestResponse response = new RestResponse(channel, t); String text = response.content().utf8ToString(); - assertThat(text, containsString("ElasticsearchException[an error occurred reading data]")); - assertThat(text, not(containsString("FileNotFoundException"))); + assertThat(text, containsString(""" + {"type":"exception","reason":"an error occurred reading data"}""")); + assertThat(text, not(containsString("file_not_found_exception"))); assertThat(text, not(containsString("/foo/bar"))); assertThat(text, not(containsString("error_trace"))); - assertChannelWarnings(channel); } public void testDetailedExceptionMessage() throws Exception { @@ -134,20 +133,6 @@ public void testDetailedExceptionMessage() throws Exception { {"type":"file_not_found_exception","reason":"/foo/bar"}""")); } - public void testNonElasticsearchExceptionIsNotShownAsSimpleMessage() throws Exception { - RestRequest request = new FakeRestRequest(); - RestChannel channel = new SimpleExceptionRestChannel(request); - - Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); - RestResponse response = new RestResponse(channel, t); - String text = response.content().utf8ToString(); - assertThat(text, not(containsString("UnknownException[an error occurred reading data]"))); - assertThat(text, not(containsString("FileNotFoundException[/foo/bar]"))); - assertThat(text, not(containsString("error_trace"))); - assertThat(text, containsString("\"error\":\"No ElasticsearchException found\"")); - assertChannelWarnings(channel); - } - public void testErrorTrace() throws Exception { RestRequest request = new FakeRestRequest(); request.params().put("error_trace", "true"); @@ -177,7 +162,6 @@ public void testAuthenticationFailedNoStackTrace() throws IOException { RestResponse response = new RestResponse(channel, 
authnException); assertThat(response.status(), is(RestStatus.UNAUTHORIZED)); assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); - assertChannelWarnings(channel); } } } @@ -202,7 +186,6 @@ public void testStackTrace() throws IOException { } else { assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); } - assertChannelWarnings(channel); } } } @@ -232,9 +215,9 @@ public void testNullThrowable() throws Exception { RestResponse response = new RestResponse(channel, null); String text = response.content().utf8ToString(); - assertThat(text, containsString("\"error\":\"unknown\"")); + assertThat(text, containsString("\"type\":\"unknown\"")); + assertThat(text, containsString("\"reason\":\"unknown\"")); assertThat(text, not(containsString("error_trace"))); - assertChannelWarnings(channel); } public void testConvert() throws IOException { @@ -324,32 +307,26 @@ public void testErrorToAndFromXContent() throws IOException { original = new ElasticsearchException("ElasticsearchException without cause"); if (detailed) { addHeadersOrMetadata = randomBoolean(); - reason = "ElasticsearchException without cause"; - } else { - reason = "ElasticsearchException[ElasticsearchException without cause]"; } + reason = "ElasticsearchException without cause"; } case 1 -> { original = new ElasticsearchException("ElasticsearchException with a cause", new FileNotFoundException("missing")); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "exception"; - reason = "ElasticsearchException with a cause"; cause = new ElasticsearchException("Elasticsearch exception [type=file_not_found_exception, reason=missing]"); - } else { - reason = "ElasticsearchException[ElasticsearchException with a cause]"; } + type = "exception"; + reason = "ElasticsearchException with a cause"; } case 2 -> { original = new ResourceNotFoundException("ElasticsearchException with custom status"); status = RestStatus.NOT_FOUND; if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "resource_not_found_exception"; - reason = "ElasticsearchException with custom status"; - } else { - reason = "ResourceNotFoundException[ElasticsearchException with custom status]"; } + type = "resource_not_found_exception"; + reason = "ElasticsearchException with custom status"; } case 3 -> { TransportAddress address = buildNewFakeTransportAddress(); @@ -360,12 +337,8 @@ public void testErrorToAndFromXContent() throws IOException { new ResourceAlreadyExistsException("ElasticsearchWrapperException with a cause that has a custom status") ); status = RestStatus.BAD_REQUEST; - if (detailed) { - type = "resource_already_exists_exception"; - reason = "ElasticsearchWrapperException with a cause that has a custom status"; - } else { - reason = "RemoteTransportException[[remote][" + address.toString() + "][action]]"; - } + type = "resource_already_exists_exception"; + reason = "ElasticsearchWrapperException with a cause that has a custom status"; } case 4 -> { original = new RemoteTransportException( @@ -373,23 +346,17 @@ public void testErrorToAndFromXContent() throws IOException { new IllegalArgumentException("wrong") ); status = RestStatus.BAD_REQUEST; - if (detailed) { - type = "illegal_argument_exception"; - reason = "wrong"; - } else { - reason = "RemoteTransportException[[ElasticsearchWrapperException with a cause that has a special treatment]]"; - } + type = "illegal_argument_exception"; + reason = "wrong"; } case 5 -> { status = 
randomFrom(RestStatus.values()); original = new ElasticsearchStatusException("ElasticsearchStatusException with random status", status); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "status_exception"; - reason = "ElasticsearchStatusException with random status"; - } else { - reason = "ElasticsearchStatusException[ElasticsearchStatusException with random status]"; } + type = "status_exception"; + reason = "ElasticsearchStatusException with random status"; } default -> throw new UnsupportedOperationException("Failed to generate random exception"); } @@ -435,7 +402,6 @@ public void testErrorToAndFromXContent() throws IOException { assertEquals(expected.status(), parsedError.status()); assertDeepEquals(expected, parsedError); - assertChannelWarnings(channel); } public void testNoErrorFromXContent() throws IOException { @@ -502,7 +468,9 @@ public void testResponseContentTypeUponException() throws Exception { Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); RestResponse response = new RestResponse(channel, t); assertThat(response.contentType(), equalTo(mediaType)); - assertChannelWarnings(channel); + assertWarnings( + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure used for detailed errors." + ); } public void testSupressedLogging() throws IOException { @@ -534,7 +502,6 @@ public void testSupressedLogging() throws IOException { "401", "unauthorized" ); - assertChannelWarnings(channel); } private void assertLogging( @@ -560,15 +527,6 @@ private void assertLogging( } } - private void assertChannelWarnings(RestChannel channel) { - if (channel.detailedErrorsEnabled() == false) { - assertWarnings( - "The JSON format of non-detailed errors will change in Elasticsearch 9.0" - + " to match the JSON structure used for detailed errors. To keep using the existing format, use the V8 REST API." 
- ); - } - } - public static class WithHeadersException extends ElasticsearchException { WithHeadersException() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java index 03ae366050646..827a07b89b2b8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java @@ -26,7 +26,7 @@ public class RestBuilderListenerTests extends ESTestCase { public void testXContentBuilderClosedInBuildResponse() throws Exception { AtomicReference builderAtomicReference = new AtomicReference<>(); RestBuilderListener builderListener = new RestBuilderListener( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { @@ -44,7 +44,7 @@ public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws E public void testXContentBuilderNotClosedInBuildResponseAssertionsDisabled() throws Exception { AtomicReference builderAtomicReference = new AtomicReference<>(); RestBuilderListener builderListener = new RestBuilderListener( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { @@ -68,7 +68,7 @@ public void testXContentBuilderNotClosedInBuildResponseAssertionsEnabled() throw assumeTrue("tests are not being run with assertions", RestBuilderListener.class.desiredAssertionStatus()); RestBuilderListener builderListener = new RestBuilderListener( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java index 8104ecfc31c3d..dad6885a08fa8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java @@ -34,7 +34,7 @@ public void testConsumesParameters() throws Exception { FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams( Map.of("parent_task_id", "the node:3", "nodes", "node1,node2", "actions", "*") ).build(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), 1); try (var threadPool = createThreadPool()) { final var nodeClient = buildNodeClient(threadPool); action.handleRequest(fakeRestRequest, fakeRestChannel, nodeClient); diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java index 0d35e4311032d..f83ba1704f954 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java @@ -222,7 +222,7 @@ public void next() { }) .withHeaders(Map.of("Content-Type", 
Collections.singletonList("application/json"))) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 1); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); RestBulkAction.ChunkHandler chunkHandler = new RestBulkAction.ChunkHandler( true, diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 4822b1c64cf41..d6953e79a0c3f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -51,7 +51,7 @@ public void testEnableFieldsEmulationNoErrors() throws Exception { .withParams(params) .build(); - action.handleRequest(request, new FakeRestChannel(request, true, 1), verifyingClient); + action.handleRequest(request, new FakeRestChannel(request, randomBoolean(), 1), verifyingClient); } public void testValidateSearchRequest() { diff --git a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java index 33978b4cd6b9f..d91b4430e4f94 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java @@ -54,7 +54,7 @@ public void clearScroll(ClearScrollRequest request, ActionListener routes() { }; FakeRestRequest fakeRestRequest = new FakeRestRequest(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, isLicensed ? 0 : 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), isLicensed ? 0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java index 8509a6475aa71..5d4ea0f30cb15 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java @@ -58,7 +58,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien } }; FakeRestRequest fakeRestRequest = new FakeRestRequest(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, securityEnabled ? 0 : 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), securityEnabled ? 
0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java index b734e602ec291..6ff05faf22d11 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java @@ -56,7 +56,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien } }; final var fakeRestRequest = new FakeRestRequest(); - final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, requiredSettingsEnabled ? 0 : 1); + final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), requiredSettingsEnabled ? 0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java index 79dba637d53d0..9a05230d82ae6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java @@ -75,7 +75,7 @@ public void testCreateApiKeyApi() throws Exception { ).withParams(Collections.singletonMap("refresh", randomFrom("false", "true", "wait_for"))).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java index a47855731b37a..812354986d5bc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java @@ -115,7 +115,7 @@ public void testLicenseEnforcement() throws Exception { } }"""), XContentType.JSON).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index c65634a76b532..d88a217cd0949 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -91,7 +91,7 @@ public void testGetApiKey() throws Exception { final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -159,7 +159,7 @@ public void testGetApiKeyWithProfileUid() throws Exception { } final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -224,7 +224,7 @@ public void testGetApiKeyOwnedByCurrentAuthenticatedUser() throws Exception { final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java index 2cb1b6a66b02b..ac472378d4874 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java @@ -77,7 +77,7 @@ public void testInvalidateApiKey() throws Exception { ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -144,7 +144,7 @@ public void testInvalidateApiKeyOwnedByCurrentAuthenticatedUser() throws Excepti ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 7005b5158e626..d5aa249b1d0f5 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -110,7 +110,7 @@ public void testQueryParsing() throws Exception { ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -184,7 +184,7 @@ public void testAggsAndAggregationsTogether() { XContentType.JSON ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -230,7 +230,7 @@ public void testParsingSearchParameters() throws Exception { ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -290,7 +290,7 @@ public void testQueryApiKeyWithProfileUid() throws Exception { } FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); SetOnce responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java index 6c71f30243eaf..f2fe28b2a936f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java @@ -94,7 +94,7 @@ public void testLicenseEnforcement() throws Exception { "metadata": {} }"""), XContentType.JSON).withParams(Map.of("id", randomAlphaOfLength(10))).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java index bd665560f425f..2ac33a780313e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java @@ -43,7 +43,7 @@ public class RestGetTokenActionTests extends ESTestCase { public void testListenerHandlesExceptionProperly() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -67,7 +67,7 @@ public void sendResponse(RestResponse restResponse) { public void testSendResponse() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -114,7 +114,7 @@ public void sendResponse(RestResponse restResponse) { public void testSendResponseKerberosError() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java index 38405a2167808..4a593eeb24ac6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java @@ -73,7 +73,7 @@ public void testQueryParsing() throws Exception { ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -132,7 +132,7 @@ public void testParsingSearchParameters() throws Exception { ).build(); final SetOnce responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); From 286c4bb0cec55a81e448eac98a493d566f12d351 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Tue, 19 Nov 2024 09:01:11 -0500 Subject: [PATCH 041/386] Added ESQL skip_unavailable testing for non-matching index expressions under RCS2 (#116846) --- .../CrossClusterEsqlRCS1MissingIndicesIT.java | 8 +- .../RemoteClusterSecurityEsqlIT.java | 525 +++++++++++++++++- 2 files changed, 524 insertions(+), 9 deletions(-) diff --git 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java index 0f39104511be0..8bccc2e3c5c23 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java @@ -76,7 +76,7 @@ public class CrossClusterEsqlRCS1MissingIndicesIT extends AbstractRemoteClusterS record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} @SuppressWarnings("unchecked") - public void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { Map clusters = (Map) responseMap.get("_clusters"); assertThat((int) responseMap.get("took"), greaterThan(0)); @@ -220,7 +220,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throw ); } - // since at least one index of the query matches on some cluster, a wildcarded index on skip_un=true is not an error + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error { String q = Strings.format("FROM %s,%s:nomatch*", INDEX1, REMOTE_CLUSTER_ALIAS); @@ -358,7 +358,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString("Unknown index [nomatch]")); + assertThat(e.getMessage(), containsString("Unknown index [nomatch]")); } // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) @@ -371,7 +371,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); + assertThat(e.getMessage(), containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); } // since there is at least one matching index in the query, the missing wildcarded local index is not an error diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 74ef6f0dafe63..09449f81121fd 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.UUIDs; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; @@ -31,6 +32,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.Map; @@ -43,9 +45,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); @@ -347,7 +352,7 @@ public void testCrossClusterQuery() throws Exception { assertRemoteOnlyResults(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertRemoteOnlyResults(response); // query remote and local cluster @@ -704,7 +709,7 @@ public void testCrossClusterEnrich() throws Exception { assertWithEnrich(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertWithEnrich(response); // Query cluster @@ -968,6 +973,462 @@ public void testAlias() throws Exception { removeAliases(); } + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), true); + populateData(); + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 9 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = responseAsMap(resp); + assertThat(((ArrayList) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((ArrayList) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, 
remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + CheckedBiConsumer verifier = new CheckedBiConsumer() { + @Override + public void accept(Response response, Boolean limit0) throws Exception { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch", "skipped", 0) + ) + ); + } + }; + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 
0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null), + new ExpectedCluster("my_remote_cluster", "employees_nomatch*", "skipped", 0) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true and the + // index was wildcarded + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), 
containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // missing concrete index on skip_unavailable=true cluster is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + final List expectedClusters = List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch,employees*", "successful", 0) + ); + assertExpectedClustersForMissingIndicesTests(map, expectedClusters); + }; + + // TODO: uncomment in follow on PR handling skip_unavailable errors at execution time + // Request limit1 = esqlRequest(q + " | LIMIT 1"); + // verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + // verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + } + + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), false); + populateData(); + + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 5 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = 
responseAsMap(resp); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + final Response response = performRequestWithRemoteSearchUser(request); + assertOK(response); + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is fatal error when skip_unavailable=false + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local 
cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // query is fatal since the remote cluster has skip_unavailable=false and has no matching indices + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + } + + // an error is thrown if there are no matching indices at all + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), 
containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // error since the remote cluster with skip_unavailable=false specified a concrete index that is not found + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + /* Example error: + *{"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated by + * API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated + * by API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + assertThat(e.getMessage(), containsString("security_exception")); + + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + /* Example error: + * {"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated by + * API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated + * by API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for API key id")); + assertThat(e.getMessage(), containsString("of user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + 
assertThat(e.getMessage(), containsString("security_exception")); + + // TODO: in follow on PR, add support for throwing a VerificationException for this scenario - no exception is currently thrown + // Request limit0 = esqlRequest(q + " | LIMIT 0"); + // e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + // assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + } + protected Request esqlRequest(String command) throws IOException { XContentBuilder body = JsonXContent.contentBuilder(); body.startObject(); @@ -1007,7 +1468,12 @@ private Response performRequestWithRemoteSearchUser(final Request request) throw return client().performRequest(request); } - private Response performRequestWithRemoteSearchUserViaAPIKey(final Request request) throws IOException { + private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException { + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encodedApiKey)); + return client().performRequest(request); + } + + private String createRemoteSearchUserAPIKey() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" { @@ -1021,8 +1487,7 @@ private Response performRequestWithRemoteSearchUserViaAPIKey(final Request reque assertOK(response); final Map responseAsMap = responseAsMap(response); final String encoded = (String) responseAsMap.get("encoded"); - request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encoded)); - return client().performRequest(request); + return encoded; } @SuppressWarnings("unchecked") @@ -1145,4 +1610,54 @@ private void assertWithEnrich(Response response) throws IOException { assertThat(flatList, containsInAnyOrder(2, 3, "usa", "canada")); } + record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} + + @SuppressWarnings("unchecked") + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + Map clusters = (Map) responseMap.get("_clusters"); + assertThat((int) responseMap.get("took"), greaterThan(0)); + + Map detailsMap = (Map) clusters.get("details"); + assertThat(detailsMap.size(), is(expected.size())); + + assertThat((int) clusters.get("total"), is(expected.size())); + assertThat((int) clusters.get("successful"), is((int) expected.stream().filter(ec -> ec.status().equals("successful")).count())); + assertThat((int) clusters.get("skipped"), is((int) expected.stream().filter(ec -> ec.status().equals("skipped")).count())); + assertThat((int) clusters.get("failed"), is((int) expected.stream().filter(ec -> ec.status().equals("failed")).count())); + + for (ExpectedCluster expectedCluster : expected) { + Map clusterDetails = (Map) detailsMap.get(expectedCluster.clusterAlias()); + String msg = expectedCluster.clusterAlias(); + + assertThat(msg, (int) clusterDetails.get("took"), greaterThan(0)); + assertThat(msg, clusterDetails.get("status"), is(expectedCluster.status())); + Map shards = (Map) clusterDetails.get("_shards"); + if (expectedCluster.totalShards() == null) { + assertThat(msg, (int) shards.get("total"), greaterThan(0)); + } else { + assertThat(msg, (int) shards.get("total"), is(expectedCluster.totalShards())); + } + + if (expectedCluster.status().equals("successful")) { + assertThat((int) shards.get("successful"), is((int) shards.get("total"))); + 
assertThat((int) shards.get("skipped"), is(0));
+
+            } else if (expectedCluster.status().equals("skipped")) {
+                assertThat((int) shards.get("successful"), is(0));
+                assertThat((int) shards.get("skipped"), is((int) shards.get("total")));
+                ArrayList failures = (ArrayList) clusterDetails.get("failures");
+                assertThat(failures.size(), is(1));
+                Map failure1 = (Map) failures.get(0);
+                Map innerReason = (Map) failure1.get("reason");
+                String expectedMsg = "Unknown index [" + expectedCluster.indexExpression() + "]";
+                assertThat(innerReason.get("reason").toString(), containsString(expectedMsg));
+                assertThat(innerReason.get("type").toString(), containsString("verification_exception"));
+
+            } else {
+                fail(msg + "; Unexpected status: " + expectedCluster.status());
+            }
+            // currently failed shards is always zero - change this once we start allowing partial data for individual shard failures
+            assertThat((int) shards.get("failed"), is(0));
+        }
+    }
 }

From 8a7491cdb89c75e61120ebca7f7c16d1477e1f5c Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Tue, 19 Nov 2024 15:44:13 +0100
Subject: [PATCH 042/386] Remove support for type, fields, copy_to and boost in metadata field definition (#116944)

Support for type, fields, copy_to and boost in metadata field definition has
been deprecated in #90989. These parameters have long been parsed and silently
ignored.
---
 docs/changelog/116944.yaml                    | 11 +++++
 .../index/mapper/MetadataFieldMapper.java     | 19 -------
 .../index/mapper/MetadataMapperTestCase.java  | 49 -------------------
 3 files changed, 11 insertions(+), 68 deletions(-)
 create mode 100644 docs/changelog/116944.yaml

diff --git a/docs/changelog/116944.yaml b/docs/changelog/116944.yaml
new file mode 100644
index 0000000000000..e7833e49cf965
--- /dev/null
+++ b/docs/changelog/116944.yaml
@@ -0,0 +1,11 @@
+pr: 116944
+summary: "Remove support for type, fields, `copy_to` and boost in metadata field definition"
+area: Mapping
+type: breaking
+issues: []
+breaking:
+  title: "Remove support for type, fields, copy_to and boost in metadata field definition"
+  area: Mapping
+  details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition
+  impact: Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings.
+  notable: false
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
index 9a2c9517dfd05..31aa787c3f758 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java
@@ -10,16 +10,13 @@
 package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.common.Explicit;
-import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Set;
 import java.util.function.Function;
 
 /**
@@ -135,8 +132,6 @@ public final MetadataFieldMapper build(MapperBuilderContext context) {
         return build();
     }
 
-    private static final Set UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost");
-
     public final void parseMetadataField(String name, MappingParserContext parserContext, Map fieldNode) {
         final Parameter[] params = getParameters();
         Map> paramsMap = Maps.newHashMapWithExpectedSize(params.length);
@@ -149,20 +144,6 @@ public final void parseMetadataField(String name, MappingParserCon
             final Object propNode = entry.getValue();
             Parameter parameter = paramsMap.get(propName);
             if (parameter == null) {
-                if (UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) {
-                    if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_6_0)) {
-                        // silently ignore type, and a few other parameters: sadly we've been doing this for a long time
-                        deprecationLogger.warn(
-                            DeprecationCategory.API,
-                            propName,
-                            "Parameter [{}] has no effect on metadata field [{}] and will be removed in future",
-                            propName,
-                            name
-                        );
-                    }
-                    iterator.remove();
-                    continue;
-                }
                 throw new MapperParsingException("unknown parameter [" + propName + "] on metadata field [" + name + "]");
             }
             parameter.parse(name, parserContext, propNode);
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java
index e538a9955d9b6..e86cb8562537f 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.IndexVersion;
-import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.test.index.IndexVersionUtils;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -142,52 +141,4 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException {
         );
         assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage());
     }
-
-    public void testTypeAndFriendsAreSilentlyIgnoredBefore_8_6_0() throws IOException {
-        assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable());
-        IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0);
-        IndexVersion version = IndexVersionUtils.randomVersionBetween(random(),
IndexVersions.MINIMUM_COMPATIBLE, previousVersion);
-        assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version));
-        MapperService mapperService = createMapperService(version, mapping(b -> {}));
-        // these parameters were previously silently ignored, they will still be ignored in existing indices
-        String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" };
-        for (String param : unsupportedParameters) {
-            String mappingAsString = "{\n"
-                + " \"_doc\" : {\n"
-                + " \""
-                + fieldName()
-                + "\" : {\n"
-                + " \""
-                + param
-                + "\" : \"any\"\n"
-                + " }\n"
-                + " }\n"
-                + "}";
-            assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)));
-        }
-    }
-
-    public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException {
-        assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable());
-        IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_6_0, IndexVersion.current());
-        assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version));
-        MapperService mapperService = createMapperService(version, mapping(b -> {}));
-        // these parameters were previously silently ignored, they are now deprecated in new indices
-        String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" };
-        for (String param : unsupportedParameters) {
-            String mappingAsString = "{\n" + " \"_doc\" : {\n" + " \"" + fieldName() + "\" : {\n" + " \"" + param + "\" : \"any\"\n" + " }\n" + " }\n" + "}";
-            assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString)));
-            assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future");
-        }
-    }
 }

From 1cf22ee5e743672ae251a0eea8db585b7251aa17 Mon Sep 17 00:00:00 2001
From: Joe Gallo
Date: Tue, 19 Nov 2024 09:49:15 -0500
Subject: [PATCH 043/386] Optimize IndexLifecycleMetadata#getPolicies (#116988)

---
 .../core/ilm/IndexLifecycleMetadata.java      | 21 ++++++++++++-------
 .../core/template/IndexTemplateRegistry.java  |  6 ++++--
 2 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
index f8cb371687d72..26f4f5c92073c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java
@@ -58,10 +58,22 @@ public class IndexLifecycleMetadata implements Metadata.Custom {
 
     private final Map policyMetadatas;
     private final OperationMode operationMode;
+    // a slightly different view of the policyMetadatas -- it's hot in a couple of places so we pre-calculate it
+    private final Map policies;
+
+    private static Map policiesMap(final Map policyMetadatas) {
+        final Map policies = new HashMap<>(policyMetadatas.size());
+        for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) {
+            LifecyclePolicy policy = policyMetadata.getPolicy();
+            policies.put(policy.getName(), policy);
+        }
+        return Collections.unmodifiableMap(policies);
+    }
 
     public IndexLifecycleMetadata(Map policies, OperationMode operationMode) {
         this.policyMetadatas =
Collections.unmodifiableMap(policies); this.operationMode = operationMode; + this.policies = policiesMap(policyMetadatas); } public IndexLifecycleMetadata(StreamInput in) throws IOException { @@ -72,6 +84,7 @@ public IndexLifecycleMetadata(StreamInput in) throws IOException { } this.policyMetadatas = policies; this.operationMode = in.readEnum(OperationMode.class); + this.policies = policiesMap(policyMetadatas); } @Override @@ -93,13 +106,7 @@ public OperationMode getOperationMode() { } public Map getPolicies() { - // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph - Map policies = new HashMap<>(policyMetadatas.size()); - for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) { - LifecyclePolicy policy = policyMetadata.getPolicy(); - policies.put(policy.getName(), policy); - } - return Collections.unmodifiableMap(policies); + return policies; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index 05f4e560b73c1..f160b704e9e12 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -580,14 +580,16 @@ private void addIndexLifecyclePoliciesIfMissing(ClusterState state) { logger.trace("running in data stream lifecycle only mode. skipping the installation of ILM policies."); return; } - IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE); + final IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE); + final Map policies = metadata != null ? metadata.getPolicies() : Map.of(); + for (LifecyclePolicy policy : getLifecyclePolicies()) { final AtomicBoolean creationCheck = policyCreationsInProgress.computeIfAbsent( policy.getName(), key -> new AtomicBoolean(false) ); if (creationCheck.compareAndSet(false, true)) { - final LifecyclePolicy currentPolicy = metadata != null ? metadata.getPolicies().get(policy.getName()) : null; + final LifecyclePolicy currentPolicy = policies.get(policy.getName()); if (Objects.isNull(currentPolicy)) { logger.debug("adding lifecycle policy [{}] for [{}], because it doesn't exist", policy.getName(), getOrigin()); putPolicy(policy, creationCheck); From 07957030dee6bdc91392c480b0a93a81fd7b875b Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 19 Nov 2024 08:57:37 -0600 Subject: [PATCH 044/386] Fix RoleDescriptor test that fails randomly (#116852) This commit fixes a test fails based on the random seed. The change updates the name of the test to match the updated name of the method it is testing. It also re-implements the test to rely less on randomness and explicitly tests the possible inputs. 
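In sketch form, the shift is from deriving the expected answer out of the
same randomized builder to asserting one explicit case per input. A minimal,
self-contained illustration of that testing pattern (the feature names and
the isSupported predicate are hypothetical stand-ins, not the real
RoleDescriptor API):

import java.util.List;

class ExplicitCasesSketch {
    // Hypothetical stand-in for the predicate under test.
    static boolean isSupported(String feature) {
        return feature.equals("workflows_restriction") == false && feature.equals("run_as") == false;
    }

    public static void main(String[] args) {
        // One explicit assertion per input: a failure names the offending case
        // instead of depending on what a random seed happened to generate.
        for (String ok : List.of("index_privileges", "metadata", "description")) {
            if (isSupported(ok) == false) throw new AssertionError(ok + " should be supported");
        }
        for (String bad : List.of("workflows_restriction", "run_as")) {
            if (isSupported(bad)) throw new AssertionError(bad + " should be rejected");
        }
        System.out.println("all explicit cases pass");
    }
}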
fixes #116376

---
 .../security/authz/RoleDescriptorTests.java   | 189 ++++++++++++++++--
 1 file changed, 172 insertions(+), 17 deletions(-)

diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java
index 218876c7d40e8..3ca6777512420 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java
@@ -31,9 +31,12 @@
 import org.elasticsearch.xpack.core.XPackClientPlugin;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
+import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup;
 import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions;
 import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges;
+import org.elasticsearch.xpack.core.security.authz.restriction.Workflow;
+import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
@@ -47,7 +50,6 @@
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION;
 import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION;
-import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges;
 import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder;
 import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions;
 import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS;
@@ -1338,38 +1340,191 @@ public void testIsEmpty() {
         }
     }
 
-    public void testHasPrivilegesOtherThanIndex() {
+    public void testHasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() {
+        // any index and some cluster privileges are allowed
         assertThat(
             new RoleDescriptor(
                 "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), // all of these are allowed
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(false)
+        );
+        // but unsupported cluster privileges are not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                new String[] { "manage_security" }, // unlikely we will ever support allowing manage security across clusters
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+
+        // application privileges are not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                new ApplicationResourcePrivileges[] {
+                    ApplicationResourcePrivileges.builder().application("app").privileges("foo").resources("res").build() },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+
+        // configurable cluster privileges are not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                new ConfigurableClusterPrivilege[] {
+                    new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+
+        // run as is not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                new String[] { "foo" },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+
+        // workflows restriction is not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
                 null,
-                randomBoolean() ? null : randomIndicesPrivileges(1, 5),
                 null,
                 null,
                 null,
                 null,
                 null,
                 null,
+                new RoleDescriptor.Restriction(WorkflowResolver.allWorkflows().stream().map(Workflow::name).toArray(String[]::new)),
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+        // remote indices privileges are not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                null,
+                null,
+                null,
+                new RoleDescriptor.RemoteIndicesPrivileges[] {
+                    RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() },
                 null,
                 null,
                 null
             ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+        // remote cluster privileges are not allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                new RemoteClusterPermissions().addGroup(
+                    new RemoteClusterPermissionGroup(
+                        RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                        new String[] { "rmt" }
+                    )
+                ),
+                null,
+                null
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
+            is(true)
+        );
+
+        // metadata, transient metadata and description are allowed
+        assertThat(
+            new RoleDescriptor(
+                "name",
+                RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
+                new RoleDescriptor.IndicesPrivileges[] {
+                    RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() },
+                null,
+                null,
+                null,
+                Collections.singletonMap("foo", "bar"),
+                Collections.singletonMap("foo", "bar"),
+                null,
+                null,
+                null,
+                "description"
+            ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(),
             is(false)
         );
-        final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder()
-            .allowReservedMetadata(true)
-            .allowRemoteIndices(true)
-            .allowRestriction(true)
-            .allowDescription(true)
-            .allowRemoteClusters(true)
-            .build();
-        final boolean expected = roleDescriptor.hasClusterPrivileges()
-            || roleDescriptor.hasConfigurableClusterPrivileges()
-            || roleDescriptor.hasApplicationPrivileges()
-            || roleDescriptor.hasRunAs()
-            || roleDescriptor.hasRemoteIndicesPrivileges()
-            || roleDescriptor.hasWorkflowsRestriction();
-        assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected));
     }
 
     private static void resetFieldPermssionsCache() {

From f3cd48209e2e364640c23dcab7da502c7d36fe50 Mon Sep 17 00:00:00 2001
From: Craig Taverner
Date: Tue, 19 Nov 2024 16:34:21 +0100
Subject: [PATCH 045/386] Added stricter range type checks and runtime
 warnings for ENRICH (#115091)

It has been noted that strange or incorrect error messages are returned
if the ENRICH command uses incompatible data types, for example a KEYWORD
with value 'foo' used in an int_range match:
https://github.com/elastic/elasticsearch/issues/107357

This error is thrown at runtime and contradicts the ES|QL policy of only
throwing errors at planning time, while at runtime we should instead set
results to null and add a warning. However, we could make the planner
stricter and block potentially mismatching types earlier.
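In sketch form, the stricter planning-time rule amounts to comparing the
enrich index's range type against the incoming column type before any query
runs. A simplified, self-contained sketch of that rule (local enums stand in
for the real RangeType/DataType classes; the actual check is
rangeTypesCompatible in EnrichLookupService below):

class RangeCompatSketch {
    enum RangeType { INTEGER, LONG, DOUBLE, IP, DATE }
    enum DataType { KEYWORD, INTEGER, LONG, DOUBLE, IP, DATETIME }

    static boolean compatible(RangeType range, DataType input) {
        if (input == DataType.KEYWORD) {
            return true; // deferred to runtime parsing; failures become null + warning
        }
        return switch (range) {
            case INTEGER, LONG -> input == DataType.INTEGER || input == DataType.LONG;
            case IP -> input == DataType.IP;
            case DATE -> input == DataType.DATETIME;
            default -> input == DataType.DOUBLE; // numeric ranges need numeric input
        };
    }

    public static void main(String[] args) {
        System.out.println(compatible(RangeType.INTEGER, DataType.DOUBLE));  // false: rejected at planning time
        System.out.println(compatible(RangeType.INTEGER, DataType.KEYWORD)); // true: parsed row by row at runtime
    }
}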
However, runtime parsing of KEYWORD fields has been a feature of ES|QL
ENRICH since its inception; in particular, we even have tests asserting
that KEYWORD fields containing parsable IP data can be joined to an
ip_range ENRICH index. In order not to create a backwards compatibility
problem, we have compromised on the following:

* Strict range type checking at planning time for incompatible range types,
  unless the incoming index field is KEYWORD
* For KEYWORD fields, allow runtime parsing of the fields, but when parsing
  fails, set the result to null and add a warning

Added extra tests to verify the behaviour of match policies on non-keyword
fields. They all behave as keywords (the enrich field is converted to
keyword at policy execution time, and the input data is converted to
keyword at lookup time).
---
 docs/changelog/115091.yaml                    |   7 +
 docs/reference/esql/esql-enrich-data.asciidoc |  31 ++-
 .../org/elasticsearch/TransportVersions.java  |   1 +
 x-pack/plugin/build.gradle                    |   1 +
 .../xpack/esql/core/type/DataType.java        |   7 +
 .../lookup/EnrichQuerySourceOperator.java     |  27 ++-
 .../EnrichQuerySourceOperatorTests.java       |   8 +-
 .../xpack/esql/action/LookupFromIndexIT.java  |   3 +-
 .../xpack/esql/action/EsqlCapabilities.java   |   5 +
 .../esql/enrich/AbstractLookupService.java    |  31 ++-
 .../esql/enrich/EnrichLookupOperator.java     |  25 +-
 .../esql/enrich/EnrichLookupService.java      |  54 ++++-
 .../esql/enrich/LookupFromIndexOperator.java  |  15 +-
 .../esql/enrich/LookupFromIndexService.java   |  25 +-
 .../xpack/esql/io/stream/PlanStreamInput.java |   3 +-
 .../esql/planner/LocalExecutionPlanner.java   |   3 +-
 .../rest-api-spec/test/esql/61_enrich_ip.yml  |  32 ++-
 .../test/esql/63_enrich_int_range.yml         | 199 ++++++++++++++++
 .../test/esql/64_enrich_int_match.yml         | 222 ++++++++++++++++++
 19 files changed, 657 insertions(+), 42 deletions(-)
 create mode 100644 docs/changelog/115091.yaml
 create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml
 create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml

diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml
new file mode 100644
index 0000000000000..762bcca5e8c52
--- /dev/null
+++ b/docs/changelog/115091.yaml
@@ -0,0 +1,7 @@
+pr: 115091
+summary: Added stricter range type checks and runtime warnings for ENRICH
+area: ES|QL
+type: bug
+issues:
+ - 107357
+ - 116799
diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc
index c48118d1c367a..ad34e29f1a55b 100644
--- a/docs/reference/esql/esql-enrich-data.asciidoc
+++ b/docs/reference/esql/esql-enrich-data.asciidoc
@@ -138,8 +138,33 @@ include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=upda
 
 include::../ingest/enrich.asciidoc[tag=update-enrich-policy]
 
-==== Limitations
+==== Enrich Policy Types and Limitations
+The {esql} `ENRICH` command supports all three enrich policy types:
+
+`geo_match`::
+Matches enrich data to incoming documents based on a <>.
+For an example, see <>.
+
+`match`::
+Matches enrich data to incoming documents based on a <>.
+For an example, see <>.
+
+`range`::
+Matches a number, date, or IP address in incoming documents to a range in the
+enrich index based on a <>. For an example,
+see <>.
+
 // tag::limitations[]
-The {esql} `ENRICH` command only supports enrich policies of type `match`.
-Furthermore, `ENRICH` only supports enriching on a column of type `keyword`.
+While all three enrich policy types are supported, there are some limitations to be aware of:
+
+* The `geo_match` enrich policy type only supports the `intersects` spatial relation.
+* It is required that the `match_field` in the `ENRICH` command is of the correct type.
+For example, if the enrich policy is of type `geo_match`, the `match_field` in the `ENRICH`
+command must be of type `geo_point` or `geo_shape`.
+Likewise, a `range` enrich policy requires a `match_field` of type `integer`, `long`, `date`, or `ip`,
+depending on the type of the range field in the original enrich index.
+* However, this constraint is relaxed for `range` policies when the `match_field` is of type `KEYWORD`.
+In this case the field values will be parsed during query execution, row by row.
+If any value fails to parse, the output values for that row will be set to `null`,
+an appropriate warning will be produced and the query will continue to execute.
 // end::limitations[]
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index a1fb241861061..1a99123ebdac6 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -202,6 +202,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0);
     public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0);
     public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0);
+    public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0);
 
     /*
      * STOP! READ THIS FIRST! No, really,
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index e25d7fb359acb..b13f7903bc8b5 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -89,5 +89,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task ->
   task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.")
   task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.")
   task.skipTest("privileges/11_builtin/Test get builtin privileges" ,"unnecessary to test compatibility")
+  task.skipTest("esql/61_enrich_ip/Invalid IP strings", "We switched from exceptions to null+warnings for ENRICH runtime errors")
 })
 
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
index 347e6b43099fc..e980b1509813e 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
@@ -591,6 +591,13 @@ public DataType noText() {
         return isString(this) ? KEYWORD : this;
     }
 
+    public boolean isDate() {
+        return switch (this) {
+            case DATETIME, DATE_NANOS -> true;
+            default -> false;
+        };
+    }
+
     /**
      * Named parameters with default values. It's just easier to do this with
      * a builder in java....
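The operator change that follows applies the usual ES|QL pattern of turning
a per-row failure into a registered warning plus a null result instead of
failing the whole query. A self-contained sketch of that pattern (the
warning list is a stand-in for the real Warnings collector, and the integer
parse is a stand-in for building the per-row enrich query):

import java.util.ArrayList;
import java.util.List;

class WarnAndContinueSketch {
    static final List<String> WARNINGS = new ArrayList<>(); // stand-in for Warnings.registerException

    static Integer parseOrNull(String raw) {
        try {
            return Integer.parseInt(raw); // stand-in for per-row query construction
        } catch (Exception e) {
            WARNINGS.add(e.toString()); // record the failure and keep the query running
            return null;                // this row's enriched values come back as null
        }
    }

    public static void main(String[] args) {
        for (String raw : new String[] { "31", "immortal", "44" }) {
            System.out.println(raw + " -> " + parseOrNull(raw));
        }
        System.out.println("warnings: " + WARNINGS);
    }
}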
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java
index 2093094fb8af5..0cd34d2ad4066 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.SourceOperator;
+import org.elasticsearch.compute.operator.Warnings;
 import org.elasticsearch.core.Releasables;
 
 import java.io.IOException;
@@ -38,17 +39,25 @@ public final class EnrichQuerySourceOperator extends SourceOperator {
     private int queryPosition = -1;
     private final IndexReader indexReader;
     private final IndexSearcher searcher;
+    private final Warnings warnings;
     private final int maxPageSize;
 
     // using smaller pages enables quick cancellation and reduces sorting costs
     public static final int DEFAULT_MAX_PAGE_SIZE = 256;
 
-    public EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) {
+    public EnrichQuerySourceOperator(
+        BlockFactory blockFactory,
+        int maxPageSize,
+        QueryList queryList,
+        IndexReader indexReader,
+        Warnings warnings
+    ) {
         this.blockFactory = blockFactory;
         this.maxPageSize = maxPageSize;
         this.queryList = queryList;
         this.indexReader = indexReader;
         this.searcher = new IndexSearcher(indexReader);
+        this.warnings = warnings;
     }
 
     @Override
@@ -73,12 +82,18 @@ public Page getOutput() {
         }
         int totalMatches = 0;
         do {
-            Query query = nextQuery();
-            if (query == null) {
-                assert isFinished();
-                break;
+            Query query;
+            try {
+                query = nextQuery();
+                if (query == null) {
+                    assert isFinished();
+                    break;
+                }
+                query = searcher.rewrite(new ConstantScoreQuery(query));
+            } catch (Exception e) {
+                warnings.registerException(e);
+                continue;
             }
-            query = searcher.rewrite(new ConstantScoreQuery(query));
             final var weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f);
             if (weight == null) {
                 continue;
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java
index 6daace76dd8b8..2af52b6bab5a8 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java
@@ -32,6 +32,8 @@
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+import org.elasticsearch.compute.operator.Warnings;
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -120,7 +122,8 @@ public void testQueries() throws Exception {
         // 3 -> [] -> []
         // 4 -> [a1] -> [3]
         // 5 -> [] -> []
-        EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader);
+        var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich");
+        EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader, warnings);
         Page p0 = queryOperator.getOutput();
         assertNotNull(p0);
         assertThat(p0.getPositionCount(), equalTo(6));
@@ -187,7 +190,8 @@ public void testRandomMatchQueries() throws Exception {
         MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid");
         var queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms);
         int maxPageSize = between(1, 256);
-        EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader);
+        var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich");
+        EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader, warnings);
         Map<Integer, Set<Integer>> actualPositions = new HashMap<>();
         while (queryOperator.isFinished() == false) {
             Page page = queryOperator.getOutput();
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
index cff9604053903..5c0c13b48df3b 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java
@@ -183,7 +183,8 @@ public void testLookupIndex() throws IOException {
             DataType.KEYWORD,
             "lookup",
             "data",
-            List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG)))
+            List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))),
+            Source.EMPTY
         );
         DriverContext driverContext = driverContext();
         try (
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index d2bee9c67af5b..9532e3dc77cb4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -278,6 +278,11 @@ public enum Cap {
          */
         RANGEQUERY_FOR_DATETIME,
 
+        /**
+         * Enforce strict type checking on ENRICH range types, and warnings for KEYWORD parsing at runtime. Done in #115091.
+         */
+        ENRICH_STRICT_RANGE_TYPES,
+
         /**
          * Fix for non-unique attribute names in ROW and logical plans.
          * https://github.com/elastic/elasticsearch/issues/110541
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java
index 286ddbaa29a5b..e52e9ae989a92 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java
@@ -41,6 +41,7 @@
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.Operator;
 import org.elasticsearch.compute.operator.OutputOperator;
+import org.elasticsearch.compute.operator.Warnings;
 import org.elasticsearch.compute.operator.lookup.EnrichQuerySourceOperator;
 import org.elasticsearch.compute.operator.lookup.MergePositionsOperator;
 import org.elasticsearch.compute.operator.lookup.QueryList;
@@ -78,6 +79,7 @@
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders;
 import org.elasticsearch.xpack.esql.planner.PlannerUtils;
@@ -166,6 +168,10 @@ abstract class AbstractLookupService
         releasables.add(mergePositionsOperator);
         SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext();
         QueryList queryList = queryList(request, searchExecutionContext, inputBlock, request.inputDataType);
+        var warnings = Warnings.createWarnings(
+            DriverContext.WarningsMode.COLLECT,
+            request.source.source().getLineNumber(),
+            request.source.source().getColumnNumber(),
+            request.source.text()
+        );
         var queryOperator = new EnrichQuerySourceOperator(
             driverContext.blockFactory(),
             EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE,
             queryList,
-            searchExecutionContext.getIndexReader()
+            searchExecutionContext.getIndexReader(),
+            warnings
         );
         releasables.add(queryOperator);
         var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, request.extractFields);
@@ -447,13 +460,22 @@ abstract static class Request {
         final DataType inputDataType;
         final Page inputPage;
         final List<NamedExpression> extractFields;
+        final Source source;
 
-        Request(String sessionId, String index, DataType inputDataType, Page inputPage, List<NamedExpression> extractFields) {
+        Request(
+            String sessionId,
+            String index,
+            DataType inputDataType,
+            Page inputPage,
+            List<NamedExpression> extractFields,
+            Source source
+        ) {
             this.sessionId = sessionId;
             this.index = index;
             this.inputDataType = inputDataType;
             this.inputPage = inputPage;
             this.extractFields = extractFields;
+            this.source = source;
         }
     }
 
@@ -467,6 +489,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans
         final DataType inputDataType;
         final Page inputPage;
         final List<NamedExpression> extractFields;
+        final Source source;
         // TODO: Remove this workaround once we have Block RefCount
         final Page toRelease;
         final RefCounted refs = AbstractRefCounted.of(this::releasePage);
@@ -477,7 +500,8 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans
             DataType inputDataType,
             Page inputPage,
             Page toRelease,
-            List<NamedExpression> extractFields
+            List<NamedExpression> extractFields,
+            Source source
         ) {
             this.sessionId = sessionId;
             this.shardId = shardId;
@@ -485,6 +509,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans
             this.inputPage = inputPage;
             this.toRelease = toRelease;
             this.extractFields = extractFields;
+            this.source = source;
         }
 
         @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java
index 6e5845fae33b7..df608a04632a2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java
@@ -16,9 +16,11 @@
 import org.elasticsearch.compute.operator.AsyncOperator;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.Operator;
+import org.elasticsearch.compute.operator.ResponseHeadersCollector;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 
 import java.io.IOException;
@@ -35,6 +37,8 @@ public final class EnrichLookupOperator extends AsyncOperator {
     private final String matchType;
     private final String matchField;
     private final List<NamedExpression> enrichFields;
+    private final ResponseHeadersCollector responseHeadersCollector;
+    private final Source source;
     private long totalTerms = 0L;
 
     public record Factory(
@@ -47,7 +51,8 @@ public record Factory(
         String enrichIndex,
         String matchType,
         String matchField,
-        List<NamedExpression> enrichFields
+        List<NamedExpression> enrichFields,
+        Source source
     ) implements OperatorFactory {
         @Override
         public String describe() {
@@ -75,7 +80,8 @@ public Operator get(DriverContext driverContext) {
                 enrichIndex,
                 matchType,
                 matchField,
-                enrichFields
+                enrichFields,
+                source
             );
         }
     }
@@ -91,7 +97,8 @@ public EnrichLookupOperator(
         String enrichIndex,
         String matchType,
         String matchField,
-        List<NamedExpression> enrichFields
+        List<NamedExpression> enrichFields,
+        Source source
     ) {
         super(driverContext, maxOutstandingRequests);
         this.sessionId = sessionId;
@@ -103,6 +110,8 @@ public EnrichLookupOperator(
         this.matchType = matchType;
         this.matchField = matchField;
         this.enrichFields = enrichFields;
+        this.source = source;
+        this.responseHeadersCollector = new ResponseHeadersCollector(enrichLookupService.getThreadContext());
     }
 
     @Override
@@ -116,9 +125,14 @@ protected void performAsync(Page inputPage, ActionListener<Page> listener) {
             matchType,
             matchField,
             new Page(inputBlock),
-            enrichFields
+            enrichFields,
+            source
+        );
+        enrichLookupService.lookupAsync(
+            request,
+            parentTask,
+            ActionListener.runBefore(listener.map(inputPage::appendPage), responseHeadersCollector::collect)
         );
-        enrichLookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage));
     }
 
     @Override
@@ -140,6 +154,7 @@ public String toString() {
     protected void doClose() {
         // TODO: Maybe create a sub-task as the parent task of all the lookup tasks
        // then cancel it when this operator terminates early (e.g., have enough result).
+        responseHeadersCollector.finish();
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
index 2d85b46e33a8c..50a1ffce4841f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
@@ -18,6 +18,8 @@
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.lookup.QueryList;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.RangeFieldMapper;
+import org.elasticsearch.index.mapper.RangeType;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.SearchService;
@@ -27,6 +29,7 @@
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.action.EsqlQueryAction;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
@@ -71,13 +74,15 @@ protected TransportRequest transportRequest(EnrichLookupService.Request request,
             request.matchField,
             request.inputPage,
             null,
-            request.extractFields
+            request.extractFields,
+            request.source
         );
     }
 
     @Override
     protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) {
         MappedFieldType fieldType = context.getFieldType(request.matchField);
+        validateTypes(inputDataType, fieldType);
         return switch (request.matchType) {
             case "match", "range" -> termQueryList(fieldType, context, inputBlock, inputDataType);
             case "geo_match" -> QueryList.geoShapeQueryList(fieldType, context, inputBlock);
@@ -85,6 +90,33 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c
         };
     }
 
+    private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) {
+        if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) {
+            // For range policy types, the ENRICH index field type will be one of a list of supported range types,
+            // which need to match the input data type (eg. ip-range -> ip, date-range -> date, etc.)
+            if (rangeTypesCompatible(rangeType.rangeType(), inputDataType) == false) {
+                throw new EsqlIllegalArgumentException(
+                    "ENRICH range and input types are incompatible: range[" + rangeType.rangeType() + "], input[" + inputDataType + "]"
+                );
+            }
+        }
+        // For match policies, the ENRICH index field will always be KEYWORD, and input type will be converted to KEYWORD.
+        // For geo_match, type validation is done earlier, in the Analyzer.
+    }
+
+    private static boolean rangeTypesCompatible(RangeType rangeType, DataType inputDataType) {
+        if (inputDataType.noText() == DataType.KEYWORD) {
+            // We allow runtime parsing of string types to numeric types
+            return true;
+        }
+        return switch (rangeType) {
+            case INTEGER, LONG -> inputDataType.isWholeNumber();
+            case IP -> inputDataType == DataType.IP;
+            case DATE -> inputDataType.isDate();
+            default -> rangeType.isNumeric() == inputDataType.isNumeric();
+        };
+    }
+
     public static class Request extends AbstractLookupService.Request {
         private final String matchType;
         private final String matchField;
@@ -96,9 +128,10 @@ public static class Request extends AbstractLookupService.Request {
             String matchType,
             String matchField,
             Page inputPage,
-            List<NamedExpression> extractFields
+            List<NamedExpression> extractFields,
+            Source source
         ) {
-            super(sessionId, index, inputDataType, inputPage, extractFields);
+            super(sessionId, index, inputDataType, inputPage, extractFields, source);
             this.matchType = matchType;
             this.matchField = matchField;
         }
@@ -116,9 +149,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR
             String matchField,
             Page inputPage,
             Page toRelease,
-            List<NamedExpression> extractFields
+            List<NamedExpression> extractFields,
+            Source source
         ) {
-            super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields);
+            super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source);
             this.matchType = matchType;
             this.matchField = matchField;
         }
@@ -138,6 +172,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro
             }
             PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null);
             List<NamedExpression> extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class);
+            var source = Source.EMPTY;
+            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+                source = Source.readFrom(planIn);
+            }
             TransportRequest result = new TransportRequest(
                 sessionId,
                 shardId,
@@ -146,7 +184,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro
                 matchField,
                 inputPage,
                 inputPage,
-                extractFields
+                extractFields,
+                source
             );
             result.setParentTask(parentTaskId);
             return result;
@@ -165,6 +204,9 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeWriteable(inputPage);
             PlanStreamOutput planOut = new PlanStreamOutput(out, null);
             planOut.writeNamedWriteableCollection(extractFields);
+            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+                source.writeTo(planOut);
+            }
         }
 
         @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java
index 836b400c54f8c..f09f7d0e23e7b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 
 import java.io.IOException;
@@ -36,7 +37,8 @@ public record Factory(
         DataType inputDataType,
         String lookupIndex,
         String matchField,
-        List<NamedExpression> loadFields
+        List<NamedExpression> loadFields,
+        Source source
     ) implements OperatorFactory {
         @Override
         public String describe() {
@@ -63,7 +65,8 @@ public Operator get(DriverContext driverContext) {
                 inputDataType,
                 lookupIndex,
                 matchField,
-                loadFields
+                loadFields,
+                source
             );
         }
     }
@@ -76,6 +79,7 @@ public Operator get(DriverContext driverContext) {
     private final String lookupIndex;
     private final String matchField;
    private final List<NamedExpression> loadFields;
+    private final Source source;
     private long totalTerms = 0L;
 
     public LookupFromIndexOperator(
@@ -88,7 +92,8 @@ public LookupFromIndexOperator(
         DataType inputDataType,
         String lookupIndex,
         String matchField,
-        List<NamedExpression> loadFields
+        List<NamedExpression> loadFields,
+        Source source
     ) {
         super(driverContext, maxOutstandingRequests);
         this.sessionId = sessionId;
@@ -99,6 +104,7 @@ public LookupFromIndexOperator(
         this.lookupIndex = lookupIndex;
         this.matchField = matchField;
         this.loadFields = loadFields;
+        this.source = source;
     }
 
     @Override
@@ -111,7 +117,8 @@ protected void performAsync(Page inputPage, ActionListener<Page> listener) {
             inputDataType,
             matchField,
             new Page(inputBlock),
-            loadFields
+            loadFields,
+            source
         );
         lookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage));
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java
index ef204e88c234f..849e8e890e248 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.esql.enrich;
 
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -25,6 +26,7 @@
 import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver;
 import org.elasticsearch.xpack.esql.action.EsqlQueryAction;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
@@ -68,7 +70,8 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque
             request.inputPage,
             null,
             request.extractFields,
-            request.matchField
+            request.matchField,
+            request.source
         );
     }
 
@@ -87,9 +90,10 @@ public static class Request extends AbstractLookupService.Request {
             DataType inputDataType,
             String matchField,
             Page inputPage,
-            List<NamedExpression> extractFields
+            List<NamedExpression> extractFields,
+            Source source
         ) {
-            super(sessionId, index, inputDataType, inputPage, extractFields);
+            super(sessionId, index, inputDataType, inputPage, extractFields, source);
             this.matchField = matchField;
         }
     }
@@ -104,9 +108,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR
             Page inputPage,
             Page toRelease,
             List<NamedExpression> extractFields,
-            String matchField
+            String matchField,
+            Source source
        ) {
-            super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields);
+            super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source);
             this.matchField = matchField;
         }
 
@@ -122,6 +127,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro
             PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null);
             List<NamedExpression> extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class);
             String matchField = in.readString();
+            var source = Source.EMPTY;
+            if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+                source = Source.readFrom(planIn);
+            }
             TransportRequest result = new TransportRequest(
                 sessionId,
                 shardId,
@@ -129,7 +138,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro
                 inputPage,
                 inputPage,
                 extractFields,
-                matchField
+                matchField,
+                source
             );
             result.setParentTask(parentTaskId);
             return result;
@@ -145,6 +155,9 @@ public void writeTo(StreamOutput out) throws IOException {
             PlanStreamOutput planOut = new PlanStreamOutput(out, null);
             planOut.writeNamedWriteableCollection(extractFields);
             out.writeString(matchField);
+            if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) {
+                source.writeTo(planOut);
+            }
         }
 
         @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
index 1e1cc3b86a9d5..47e5b9acfbf9d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.xpack.esql.Column;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.NameId;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.session.Configuration;
 
@@ -160,7 +161,7 @@ public Block[] readCachedBlockArray() throws IOException {
 
     @Override
     public String sourceText() {
-        return configuration.query();
+        return configuration == null ? Source.EMPTY.text() : configuration.query();
     }
 
     static void throwOnNullOptionalRead(Class<?> type) throws IOException {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index 0d0b8dda5fc74..1e441826240c9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -489,7 +489,8 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon
                 enrichIndex,
                 enrich.matchType(),
                 enrich.policyMatchField(),
-                enrich.enrichFields()
+                enrich.enrichFields(),
+                enrich.source()
             ),
             layout
         );
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml
index 076bf116292d0..3f2bcb4ed7f4d 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml
@@ -162,14 +162,38 @@ teardown:
 ---
 "Invalid IP strings":
   - requires:
-      cluster_features: ["gte_v8.14.0"]
-      reason: "IP range ENRICH support was added in 8.14.0"
+      capabilities:
+        - method: POST
+          path: /_query
+          parameters: [method, path, parameters, capabilities]
+          capabilities: [enrich_strict_range_types]
+      reason: "Runtime range type checking was added"
+      test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex]
 
   - do:
-      catch: /'invalid_[\d\.]+' is not an IP string literal/
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+        - "Line (1:68|-1:-1): evaluation of \\[(ENRICH networks-policy ON ip_str|)\\] failed, treating result as null. Only first 20 failures recorded."
+        - "Line (1:68|-1:-1): java.lang.IllegalArgumentException: 'invalid_' is not an IP string literal."
      esql.query:
        body:
-          query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message'
+          query: 'FROM events | eval ip_str = mv_concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message'
+
+  - match: { columns.0.name: "ip" }
+  - match: { columns.0.type: "ip" }
+  - match: { columns.1.name: "name" }
+  - match: { columns.1.type: "keyword" }
+  - match: { columns.2.name: "department" }
+  - match: { columns.2.type: "keyword" }
+  - match: { columns.3.name: "message" }
+  - match: { columns.3.type: "keyword" }
+
+  - length: { values: 4 }
+  - match: { values.0: [ "10.100.0.21", null, null, "network connected" ] }
+  - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], null, null, "sending messages" ] }
+  - match: { values.2: [ "10.101.0.107" , null, null, "network disconnected" ] }
+  - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] }
 
 ---
 "IP":
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml
new file mode 100644
index 0000000000000..4d84a10507504
--- /dev/null
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml
@@ -0,0 +1,199 @@
+---
+setup:
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_query
+          parameters: [method, path, parameters, capabilities]
+          capabilities: [enrich_strict_range_types]
+      reason: "Strict range type checking was added"
+      test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex]
+
+  - do:
+      indices.create:
+        index: ages
+        body:
+          settings:
+            index.number_of_shards: 1
+            index.routing.rebalance.enable: "none"
+          mappings:
+            properties:
+              age_range:
+                type: "integer_range"
+              description:
+                type: "keyword"
+
+  - do:
+      bulk:
+        index: ages
+        refresh: true
+        body:
+          - { "index": { } }
+          - { "age_range": { "gte": 0, "lt": 2 }, "description": "Baby" }
+          - { "index": { } }
+          - { "age_range": { "gte": 2, "lt": 4 }, "description": "Toddler" }
+          - { "index": { } }
+          - { "age_range": { "gte": 3, "lt": 5 }, "description": "Preschooler" }
+          - { "index": { } }
+          - { "age_range": { "gte": 5, "lt": 12 }, "description": "Child" }
+          - { "index": { } }
+          - { "age_range": { "gte": 13, "lt": 20 }, "description": "Adolescent" }
+          - { "index": { } }
+          - { "age_range": { "gte": 20, "lt": 40 }, "description": "Young Adult" }
+          - { "index": { } }
+          - { "age_range": { "gte": 40, "lt": 60 }, "description": "Middle-aged" }
+          - { "index": { } }
+          - { "age_range": { "gte": 60, "lt": 80 }, "description": "Senior" }
+          - { "index": { } }
+          - { "age_range": { "gte": 80, "lt": 100 }, "description": "Elderly" }
+          - { "index": { } }
+          - { "age_range": { "gte": 100, "lt": 200 }, "description": "Incredible" }
+  - do:
+      cluster.health:
+        wait_for_no_initializing_shards: true
+        wait_for_events: languid
+
+  - do:
+      enrich.put_policy:
+        name: ages-policy
+        body:
+          range:
+            indices: [ "ages" ]
+            match_field: "age_range"
+            enrich_fields: [ "description" ]
+
+  - do:
+      enrich.execute_policy:
+        name: ages-policy
+
+  - do:
+      indices.create:
+        index: employees
+        body:
+          mappings:
+            properties:
+              name:
+                type: keyword
+              age:
+                type: integer
+              ak:
+                type: keyword
+              salary:
+                type: double
+
+  - do:
+      bulk:
+        index: employees
+        refresh: true
+        body:
+          - { "index": { } }
+          - { "name": "Joe Soap", "age": 36, "ak": "36", "salary": 55.55 }
+          - { "index": { } }
+          - { "name": "Jane Doe", "age": 31, "ak": "31", "salary": 55.55 }
+          - { "index": { } }
+          - { "name": "Jane Immortal", "age": -1, "ak": "immortal", "salary": 55.55 }
+          - { "index": { } }
+          - { "name": "Magic Mike", "age": 44, "ak": "44", "salary": 55.55 }
+          - { "index": { } }
+          - { "name": "Anon Ymous", "age": 61, "ak": "61", "salary": 55.55 }
+
+---
+teardown:
+  - do:
+      enrich.delete_policy:
+        name: ages-policy
+
+---
+"ages":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM employees | ENRICH ages-policy ON age | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 4 }
+  - match: { values.0: [ 2, "Young Adult" ] }
+  - match: { values.1: [ 1, "Middle-aged" ] }
+  - match: { values.2: [ 1, "Senior" ] }
+  - match: { values.3: [ 1, null ] }
+
+---
+"ages as typecast keywords":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+        - "Line 1:29: evaluation of \\[ak::integer\\] failed, treating result as null. Only first 20 failures recorded."
+        - "Line 1:29: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number \\[immortal\\]"
+      esql.query:
+        body:
+          query: 'FROM employees | EVAL aki = ak::integer | ENRICH ages-policy ON aki | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 4 }
+  - match: { values.0: [ 2, "Young Adult" ] }
+  - match: { values.1: [ 1, "Middle-aged" ] }
+  - match: { values.2: [ 1, "Senior" ] }
+  - match: { values.3: [ 1, null ] }
+
+---
+"ages as keywords":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+        - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON ak|)\\] failed, treating result as null. Only first 20 failures recorded."
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"immortal\\"'
+      esql.query:
+        body:
+          query: 'FROM employees | ENRICH ages-policy ON ak | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 4 }
+  - match: { values.0: [ 2, "Young Adult" ] }
+  - match: { values.1: [ 1, "Middle-aged" ] }
+  - match: { values.2: [ 1, "Senior" ] }
+  - match: { values.3: [ 1, null ] }
+
+---
+"Invalid age as keyword":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+        - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON name|)\\] failed, treating result as null. Only first 20 failures recorded."
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Joe Soap\\"'
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Doe\\"'
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Immortal\\"'
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Magic Mike\\"'
+        - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Anon Ymous\\"'
+      esql.query:
+        body:
+          query: 'FROM employees | ENRICH ages-policy ON name | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 1 }
+  - match: { values.0: [ 5, null ] }
+
+---
+"Invalid age as double":
+  - do:
+      catch: /ENRICH range and input types are incompatible. range\[INTEGER\], input\[DOUBLE\]/
+      esql.query:
+        body:
+          query: 'FROM employees | ENRICH ages-policy ON salary | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml
new file mode 100644
index 0000000000000..ef11e5098f5c2
--- /dev/null
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml
@@ -0,0 +1,222 @@
+---
+setup:
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_query
+          parameters: [method, path, parameters, capabilities]
+          capabilities: [enrich_strict_range_types]
+      reason: "Strict range type checking was added"
+      test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex]
+
+  - do:
+      indices.create:
+        index: decades
+        body:
+          settings:
+            index.number_of_shards: 1
+            index.routing.rebalance.enable: "none"
+          mappings:
+            properties:
+              decade:
+                type: "integer"
+              description:
+                type: "keyword"
+
+  - do:
+      bulk:
+        index: decades
+        refresh: true
+        body:
+          - { "index": { } }
+          - { "decade": 1900, "description": "Gay Nineties" }
+          - { "index": { } }
+          - { "decade": 1910, "description": "Teens" }
+          - { "index": { } }
+          - { "decade": 1920, "description": "Roaring Twenties" }
+          - { "index": { } }
+          - { "decade": 1930, "description": "Dirty Thirties" }
+          - { "index": { } }
+          - { "decade": 1940, "description": "War Years" }
+          - { "index": { } }
+          - { "decade": 1950, "description": "Fabulous Fifties" }
+          - { "index": { } }
+          - { "decade": 1960, "description": "Swinging Sixties" }
+          - { "index": { } }
+          - { "decade": 1970, "description": "Me Decade" }
+          - { "index": { } }
+          - { "decade": 1980, "description": "Decade of Greed" }
+          - { "index": { } }
+          - { "decade": 1990, "description": "Nineties" }
+          - { "index": { } }
+          - { "decade": 2000, "description": "Aughts" }
+          - { "index": { } }
+          - { "decade": 2010, "description": "Digital Age" }
+          - { "index": { } }
+          - { "decade": 2020, "description": "Roaring Twenties 2.0" }
+  - do:
+      cluster.health:
+        wait_for_no_initializing_shards: true
+        wait_for_events: languid
+
+  - do:
+      enrich.put_policy:
+        name: decades-policy
+        body:
+          match:
+            indices: [ "decades" ]
+            match_field: "decade"
+            enrich_fields: [ "description" ]
+
+  - do:
+      enrich.execute_policy:
+        name: decades-policy
+
+  - do:
+      indices.create:
+        index: songs
+        body:
+          mappings:
+            properties:
+              title:
+                type: keyword
+              year:
+                type: integer
+              singer:
+                type: keyword
+
+  - do:
+      bulk:
+        index: songs
+        refresh: true
+        body:
+          - { "index": { } }
+          - { "singer": "Louis Armstrong", "title": "What a Wonderful World", "year": 1967 }
+          - { "index": { } }
+          - { "singer": "The Beatles", "title": "Hey Jude", "year": 1968 }
+          - { "index": { } }
+          - { "singer": "Elvis Presley", "title": "Jailhouse Rock", "year": 1957 }
+          - { "index": { } }
+          - { "singer": "Billie Holiday", "title": "Strange Fruit", "year": 1939 }
+          - { "index": { } }
+          - { "singer": "Frank Sinatra", "title": "Fly Me to the Moon", "year": 1964 }
+          - { "index": { } }
+          - { "singer": "Bob Dylan", "title": "Blowin' in the Wind", "year": 1963 }
+          - { "index": { } }
+          - { "singer": "Queen", "title": "Bohemian Rhapsody", "year": 1975 }
+          - { "index": { } }
+          - { "singer": "ABBA", "title": "Dancing Queen", "year": 1976 }
+          - { "index": { } }
+          - { "singer": "Michael Jackson", "title": "Thriller", "year": 1982 }
+          - { "index": { } }
+          - { "singer": "Nirvana", "title": "Smells Like Teen Spirit", "year": 1991 }
+          - { "index": { } }
+          - { "singer": "Whitney Houston", "title": "I Will Always Love You", "year": 1992 }
+          - { "index": { } }
+          - { "singer": "Aretha Franklin", "title": "Respect", "year": 1967 }
+          - { "index": { } }
+          - { "singer": "Chuck Berry", "title": "Johnny B. Goode", "year": 1958 }
+          - { "index": { } }
+          - { "singer": "Madonna", "title": "Like a Prayer", "year": 1989 }
+          - { "index": { } }
+          - { "singer": "The Rolling Stones", "title": "(I Can't Get No) Satisfaction", "year": 1965 }
+          - { "index": { } }
+          - { "singer": "Beyoncé", "title": "Single Ladies (Put a Ring on It)", "year": 2008 }
+          - { "index": { } }
+          - { "singer": "Adele", "title": "Rolling in the Deep", "year": 2010 }
+          - { "index": { } }
+          - { "singer": "Lady Gaga", "title": "Bad Romance", "year": 2009 }
+          - { "index": { } }
+          - { "singer": "Billie Eilish", "title": "Bad Guy", "year": 2019 }
+          - { "index": { } }
+          - { "singer": "Taylor Swift", "title": "Anti-Hero", "year": 2022 }
+
+---
+teardown:
+  - do:
+      enrich.delete_policy:
+        name: decades-policy
+
+---
+"decades":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::integer | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 9 }
+  - match: { values.0: [ 6, "Swinging Sixties" ] }
+  - match: { values.1: [ 2, "Aughts" ] }
+  - match: { values.2: [ 2, "Decade of Greed" ] }
+  - match: { values.3: [ 2, "Digital Age" ] }
+  - match: { values.4: [ 2, "Fabulous Fifties" ] }
+  - match: { values.5: [ 2, "Me Decade" ] }
+  - match: { values.6: [ 2, "Nineties" ] }
+  - match: { values.7: [ 1, "Dirty Thirties" ] }
+  - match: { values.8: [ 1, "Roaring Twenties 2.0" ] }
+
+---
+"decades as typecast keywords":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::keyword | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 9 }
+  - match: { values.0: [ 6, "Swinging Sixties" ] }
+  - match: { values.1: [ 2, "Aughts" ] }
+  - match: { values.2: [ 2, "Decade of Greed" ] }
+  - match: { values.3: [ 2, "Digital Age" ] }
+  - match: { values.4: [ 2, "Fabulous Fifties" ] }
+  - match: { values.5: [ 2, "Me Decade" ] }
+  - match: { values.6: [ 2, "Nineties" ] }
+  - match: { values.7: [ 1, "Dirty Thirties" ] }
+  - match: { values.8: [ 1, "Roaring Twenties 2.0" ] }
+
+---
+"Invalid decade as keyword":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM songs | ENRICH decades-policy ON singer | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 1 }
+  - match: { values.0: [ 20, null ] }
+
+---
+"Invalid decade as double":
+  - do:
+      allowed_warnings_regex:
+        - "No limit defined, adding default limit of \\[.*\\]"
+      esql.query:
+        body:
+          query: 'FROM songs | EVAL decade = 10.0*FLOOR(year/10) | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC'
+
+  - match: { columns.0.name: "count" }
+  - match: { columns.0.type: "long" }
+  - match: { columns.1.name: "description" }
+  - match: { columns.1.type: "keyword" }
+
+  - length: { values: 1 }
+  - match: { values.0: [ 20, null ] }

From b17674b48ab2abd8dd75108e9771e6144d704862 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Tue, 19 Nov 2024 15:42:34 +0000
Subject: [PATCH 046/386] Remove REST historical features (#116929)

---
 .../FullClusterRestartDownsampleIT.java       |   5 -
 .../upgrades/FullClusterRestartIT.java        |   3 -
 .../LogsIndexModeFullClusterRestartIT.java    |   3 -
 .../upgrades/DesiredNodesUpgradeIT.java       |   3 -
 .../elasticsearch/upgrades/DownsampleIT.java  |   5 -
 .../elasticsearch/upgrades/IndexingIT.java    |   6 +-
 .../upgrades/SnapshotBasedRecoveryIT.java     |   7 -
 .../org/elasticsearch/upgrades/TsdbIT.java    |  68 ----------
 .../UpgradeWithOldIndexSettingsIT.java        |  57 +--------
 .../metadata/DataStreamTestHelper.java        |  25 ++--
 .../test/rest/ESRestTestCase.java             | 121 ++++++++----------
 .../test/rest/ESRestTestFeatureService.java   |   1 -
 .../test/rest/RestTestLegacyFeatures.java     | 117 -----------------
 .../test/rest/yaml/section/DoSection.java     |   9 +-
 .../xpack/restart/FullClusterRestartIT.java   |  30 +----
 ...nfigIndexMappingsFullClusterRestartIT.java |   7 +-
 .../MlHiddenIndicesFullClusterRestartIT.java  |   7 +-
 .../MlMigrationFullClusterRestartIT.java      |   7 +-
 .../ApiKeyBackwardsCompatibilityIT.java       |  72 ++++-------
 .../MlAssignmentPlannerUpgradeIT.java         |  18 +--
 .../UpgradeClusterClientYamlTestSuiteIT.java  |   8 +-
 .../xpack/test/rest/XPackRestTestHelper.java  |  28 ++--
 22 files changed, 116 insertions(+), 491 deletions(-)
 delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java

diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
index 3a983dbd058df..6682d48c1796c 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.test.cluster.FeatureFlag;
 import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.rules.RuleChain;
@@ -269,10 +268,6 @@ private String getRollupIndexName() throws IOException {
     }
 
     public void testRollupIndex() throws Exception {
-        assumeTrue(
-            "Downsample got many stability improvements in 8.10.0",
-            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE)
-        );
         if (isRunningAgainstOldCluster()) {
             createIlmPolicy();
             createIndex();
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index 26e4f3146da2f..83bf16a0cc24a 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -41,7 +41,6 @@
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.ObjectPath;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;
@@ -262,7 +261,6 @@ public void testNewReplicas() throws Exception {
     }
 
     public void testSearchTimeSeriesMode() throws Exception {
-        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT));
         int numDocs;
         if (isRunningAgainstOldCluster()) {
             numDocs = createTimeSeriesModeIndex(1);
@@ -300,7 +298,6 @@ public void testSearchTimeSeriesMode() throws Exception {
     }
 
     public void testNewReplicasTimeSeriesMode() throws Exception {
-        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT));
         if (isRunningAgainstOldCluster()) {
             createTimeSeriesModeIndex(0);
         } else {
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java
index f1f4fcf091e8f..3459a29e98649 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java
@@ -20,7 +20,6 @@
 import org.elasticsearch.test.MapMatcher;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;
 import org.junit.ClassRule;
@@ -125,8 +124,6 @@ protected ElasticsearchCluster getUpgradeCluster() {
         }""";
 
     public void testLogsIndexing() throws IOException {
-        assumeTrue("Test uses data streams", oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED));
-
         if (isRunningAgainstOldCluster()) {
             assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE)));
             assertOK(client().performRequest(createDataStream("logs-apache-production")));
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index e0d1e7aafa637..eb01d67432fe3 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; @@ -49,8 +48,6 @@ private enum ProcessorsPrecision { } public void testUpgradeDesiredNodes() throws Exception { - assumeTrue("Desired nodes was introduced in 8.1", oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_NODE_API_SUPPORTED)); - if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) { assertUpgradedNodesCanReadDesiredNodes(); } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 70658da70eb80..bca0c26ad2c32 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.junit.Before; import java.io.IOException; @@ -244,10 +243,6 @@ private String getRollupIndexName() throws IOException { } public void testRollupIndex() throws Exception { - assumeTrue( - "Downsample got many stability improvements in 8.10.0", - oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE) - ); if (isOldCluster()) { createIlmPolicy(); createIndex(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 65bf62783fd69..090f409fd46d0 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -18,10 +18,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.test.ListMatcher; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -258,7 +258,6 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio public void testTsdb() throws IOException { final Version oldClusterVersion = Version.fromString(getOldClusterVersion()); - assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); StringBuilder bulk = new StringBuilder(); if (isOldCluster()) { @@ -385,6 +384,7 @@ 
private void tsdbBulk(StringBuilder bulk, String dim, long timeStart, long timeE
     private void assertTsdbAgg(final Version oldClusterVersion, final List<String> expectedTsids, final Matcher<?>... expected)
         throws IOException {
+        @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_ANALYTICS)
         boolean onOrAfterTsidHashingVersion = oldClusterVersion.onOrAfter(Version.V_8_13_0);
         Request request = new Request("POST", "/tsdb/_search");
         request.addParameter("size", "0");
@@ -414,8 +414,6 @@ private void assertTsdbAgg(final Version oldClusterVersion, final List<String> e
     }
 
     public void testSyntheticSource() throws IOException {
-        assumeTrue("added in 8.4.0", oldClusterHasFeature(RestTestLegacyFeatures.SYNTHETIC_SOURCE_SUPPORTED));
-
         if (isOldCluster()) {
             Request createIndex = new Request("PUT", "/synthetic");
             XContentBuilder indexSpec = XContentBuilder.builder(XContentType.JSON.xContent()).startObject();
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
index 3343a683bbd11..9217852f1867c 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
@@ -50,12 +49,6 @@ public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) {
     }
 
     public void testSnapshotBasedRecovery() throws Exception {
-        assumeTrue(
-            "Cancel shard allocation command is broken for initial versions of the desired_balance allocator",
-            oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_SUPPORTED) == false
-                || oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_FIXED)
-        );
-
         final String indexName = "snapshot_based_recovery";
         final String repositoryName = "snapshot_based_recovery_repo";
         final int numDocs = 200;
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
index 6744c84f29d0f..46b39128c3a31 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.FormatNames;
 import org.elasticsearch.test.rest.ObjectPath;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 
 import java.io.IOException;
 import java.time.Instant;
@@ -24,8 +23,6 @@
 import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
 
 public class TsdbIT extends AbstractRollingUpgradeTestCase {
 
@@ -131,7 +128,6 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) {
     """;
 
     public void testTsdbDataStream() throws Exception {
-        assumeTrue("TSDB was GA-ed in 8.7.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE));
         String 
dataStreamName = "k8s"; if (isOldCluster()) { final String INDEX_TEMPLATE = """ @@ -155,70 +151,6 @@ public void testTsdbDataStream() throws Exception { } } - public void testTsdbDataStreamWithComponentTemplate() throws Exception { - assumeTrue( - "TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", - oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE) - && (oldClusterHasFeature(RestTestLegacyFeatures.TSDB_EMPTY_TEMPLATE_FIXED) == false) - ); - String dataStreamName = "template-with-component-template"; - if (isOldCluster()) { - final String COMPONENT_TEMPLATE = """ - { - "template": $TEMPLATE - } - """; - var putComponentTemplate = new Request("POST", "/_component_template/1"); - String template = TEMPLATE.replace("\"time_series\"", "\"time_series\", \"routing_path\": [\"k8s.pod.uid\"]"); - putComponentTemplate.setJsonEntity(COMPONENT_TEMPLATE.replace("$TEMPLATE", template)); - assertOK(client().performRequest(putComponentTemplate)); - final String INDEX_TEMPLATE = """ - { - "index_patterns": ["$PATTERN"], - "composed_of": ["1"], - "data_stream": { - } - }"""; - // Add composable index template - String templateName = "2"; - var putIndexTemplateRequest = new Request("POST", "/_index_template/" + templateName); - putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$PATTERN", dataStreamName)); - assertOK(client().performRequest(putIndexTemplateRequest)); - - performOldClustertOperations(templateName, dataStreamName); - } else if (isMixedCluster()) { - performMixedClusterOperations(dataStreamName); - } else if (isUpgradedCluster()) { - performUpgradedClusterOperations(dataStreamName); - - var dataStreams = getDataStream(dataStreamName); - assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(2)); - String firstBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.0.index_name"); - { - var indices = getIndex(firstBackingIndex); - var escapedBackingIndex = firstBackingIndex.replace(".", "\\."); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), nullValue()); - String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"); - assertThat(startTime, nullValue()); - String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"); - assertThat(endTime, nullValue()); - } - String secondBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.1.index_name"); - { - var indices = getIndex(secondBackingIndex); - var escapedBackingIndex = secondBackingIndex.replace(".", "\\."); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName)); - assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), equalTo("time_series")); - String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"); - assertThat(startTime, notNullValue()); - String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"); - assertThat(endTime, notNullValue()); - } - } - } - private void performUpgradedClusterOperations(String dataStreamName) throws Exception { ensureGreen(dataStreamName); var rolloverRequest = new Request("POST", "/" + dataStreamName + 
"/_rollover"); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index f3a322b54039a..b2298c12b7b98 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -17,13 +17,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.util.Map; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; -import static org.hamcrest.Matchers.is; public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { @@ -35,33 +33,22 @@ public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { private static final String EXPECTED_WARNING = "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will " + "be removed in a future release! See the breaking changes documentation for the next major version."; - private static final String EXPECTED_V8_WARNING = "[index.indexing.slowlog.level] setting was deprecated in the previous Elasticsearch" - + " release and is removed in this release."; - public void testOldIndexSettings() throws Exception { if (isOldCluster()) { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME); createTestIndex.setJsonEntity("{\"settings\": {\"index.indexing.slowlog.level\": \"WARN\"}}"); createTestIndex.setOptions(expectWarnings(EXPECTED_WARNING)); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED)) { - assertTrue( - expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() - .contains("unknown setting [index.indexing.slowlog.level]") - ); + assertTrue( + expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() + .contains("unknown setting [index.indexing.slowlog.level]") + ); - Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); - client().performRequest(createTestIndex1); - } else { - // create index with settings no longer valid in 8.0 - client().performRequest(createTestIndex); - } + Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); + client().performRequest(createTestIndex1); // add some data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { - bulk.setOptions(expectWarnings(EXPECTED_WARNING)); - } bulk.setJsonEntity(Strings.format(""" {"index": {"_index": "%s"}} {"f1": "v1", "f2": "v2"} @@ -71,34 +58,12 @@ public void testOldIndexSettings() throws Exception { // add some more data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { - bulk.setOptions(expectWarnings(EXPECTED_WARNING)); - } bulk.setJsonEntity(Strings.format(""" {"index": {"_index": "%s"}} {"f1": "v3", "f2": "v4"} """, INDEX_NAME)); client().performRequest(bulk); } else { - if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == 
false) { - Request createTestIndex = new Request("PUT", "/" + INDEX_NAME + "/_settings"); - // update index settings should work - createTestIndex.setJsonEntity("{\"index.indexing.slowlog.level\": \"INFO\"}"); - createTestIndex.setOptions(expectWarnings(EXPECTED_V8_WARNING)); - client().performRequest(createTestIndex); - - // ensure we were able to change the setting, despite it having no effect - Request indexSettingsRequest = new Request("GET", "/" + INDEX_NAME + "/_settings"); - Map response = entityAsMap(client().performRequest(indexSettingsRequest)); - - var slowLogLevel = (String) (XContentMapValues.extractValue( - INDEX_NAME + ".settings.index.indexing.slowlog.level", - response - )); - - // check that we can read our old index settings - assertThat(slowLogLevel, is("INFO")); - } assertCount(INDEX_NAME, 2); } } @@ -118,16 +83,6 @@ private void assertCount(String index, int countAtLeast) throws IOException { public static void updateIndexSettingsPermittingSlowlogDeprecationWarning(String index, Settings.Builder settings) throws IOException { Request request = new Request("PUT", "/" + index + "/_settings"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(settings.build())); - if (oldClusterHasFeature(RestTestLegacyFeatures.DEPRECATION_WARNINGS_LEAK_FIXED) == false) { - // There is a bug (fixed in 7.17.9 and 8.7.0 where deprecation warnings could leak into ClusterApplierService#applyChanges) - // Below warnings are set (and leaking) from an index in this test case - request.setOptions(expectVersionSpecificWarnings(v -> { - v.compatible( - "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will be removed in a future release! " - + "See the breaking changes documentation for the next major version." - ); - })); - } client().performRequest(request); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 5ca52024e82f6..add110de35a0b 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -157,23 +157,14 @@ public static DataStream newInstance( .build(); } - public static String getLegacyDefaultBackingIndexName( - String dataStreamName, - long generation, - long epochMillis, - boolean isNewIndexNameFormat - ) { - if (isNewIndexNameFormat) { - return String.format( - Locale.ROOT, - BACKING_INDEX_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); - } else { - return getLegacyDefaultBackingIndexName(dataStreamName, generation); - } + public static String getLegacyDefaultBackingIndexName(String dataStreamName, long generation, long epochMillis) { + return String.format( + Locale.ROOT, + BACKING_INDEX_PREFIX + "%s-%s-%06d", + dataStreamName, + DATE_FORMATTER.formatMillis(epochMillis), + generation + ); } public static String getLegacyDefaultBackingIndexName(String dataStreamName, long generation) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 28c9905386091..c20aded9280fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -415,8 +415,7 @@ protected final 
TestFeatureService createTestFeatureService(
             logger.warn(
                 "This test is running on the legacy test framework; historical features from production code will not be available. "
                     + "You need to port the test to the new test plugins in order to use historical features from production code. "
-                    + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.",
-                RestTestLegacyFeatures.class.getCanonicalName()
+                    + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification."
             );
         }
         return new ESRestTestFeatureService(additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, clusterStateFeatures.values());
@@ -719,10 +718,6 @@ protected boolean preserveTemplatesUponCompletion() {
      * all feature states, deleting system indices, system associated indices, and system data streams.
      */
     protected boolean resetFeatureStates() {
-        // ML reset fails when ML is disabled in versions before 8.7
-        if (isMlEnabled() == false && clusterHasFeature(RestTestLegacyFeatures.ML_STATE_RESET_FALLBACK_ON_DISABLED) == false) {
-            return false;
-        }
         return true;
     }
 
@@ -917,50 +912,46 @@ private void wipeCluster() throws Exception {
          * slows down the test because xpack will just recreate
          * them.
          */
-        // In case of bwc testing, we need to delete component and composable
-        // index templates only for clusters that support this historical feature
-        if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) {
-            try {
-                Request getTemplatesRequest = new Request("GET", "_index_template");
-                Map<String, Object> composableIndexTemplates = XContentHelper.convertToMap(
-                    JsonXContent.jsonXContent,
-                    EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()),
-                    false
-                );
-                List<String> names = ((List<?>) composableIndexTemplates.get("index_templates")).stream()
-                    .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
-                    .filter(name -> isXPackTemplate(name) == false)
-                    .collect(Collectors.toList());
-                if (names.isEmpty() == false) {
-                    try {
-                        adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names)));
-                    } catch (ResponseException e) {
-                        logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e);
-                    }
+        try {
+            Request getTemplatesRequest = new Request("GET", "_index_template");
+            Map<String, Object> composableIndexTemplates = XContentHelper.convertToMap(
+                JsonXContent.jsonXContent,
+                EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()),
+                false
+            );
+            List<String> names = ((List<?>) composableIndexTemplates.get("index_templates")).stream()
+                .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
+                .filter(name -> isXPackTemplate(name) == false)
+                .collect(Collectors.toList());
+            if (names.isEmpty() == false) {
+                try {
+                    adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names)));
+                } catch (ResponseException e) {
+                    logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e);
                }
-            } catch (Exception e) {
-                logger.debug("ignoring exception removing all composable index templates", e);
-                // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore
            }
-            try {
-                Request compReq = new Request("GET", "_component_template");
-                String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
-                Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
-                List<String> names = ((List<?>) cTemplates.get("component_templates")).stream()
-                    .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
-                    .filter(name -> isXPackTemplate(name) == false)
-                    .collect(Collectors.toList());
-                if (names.isEmpty() == false) {
-                    try {
-                        adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names)));
-                    } catch (ResponseException e) {
-                        logger.warn(() -> format("unable to remove multiple component templates %s", names), e);
-                    }
+        } catch (Exception e) {
+            logger.debug("ignoring exception removing all composable index templates", e);
+            // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore
+        }
+        try {
+            Request compReq = new Request("GET", "_component_template");
+            String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
+            Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
+            List<String> names = ((List<?>) cTemplates.get("component_templates")).stream()
+                .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
+                .filter(name -> isXPackTemplate(name) == false)
+                .collect(Collectors.toList());
+            if (names.isEmpty() == false) {
+                try {
+                    adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names)));
+                } catch (ResponseException e) {
+                    logger.warn(() -> format("unable to remove multiple component templates %s", names), e);
                }
-            } catch (Exception e) {
-                logger.debug("ignoring exception removing all component templates", e);
-                // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore
            }
+        } catch (Exception e) {
+            logger.debug("ignoring exception removing all component templates", e);
+            // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore
        }
 
         if (has(ProductFeature.LEGACY_TEMPLATES)) {
@@ -1058,29 +1049,25 @@ private Set<String> getAllUnexpectedTemplates() throws IOException {
         Set<String> unexpectedTemplates = new HashSet<>();
         if (preserveDataStreamsUponCompletion() == false && preserveTemplatesUponCompletion() == false) {
             if (has(ProductFeature.XPACK)) {
-                // In case of bwc testing, we need to delete component and composable
-                // index templates only for clusters that support this historical feature
-                if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) {
-                    Request getTemplatesRequest = new Request("GET", "_index_template");
-                    Map<String, Object> composableIndexTemplates = XContentHelper.convertToMap(
-                        JsonXContent.jsonXContent,
-                        EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()),
-                        false
-                    );
-                    unexpectedTemplates.addAll(
-                        ((List<?>) composableIndexTemplates.get("index_templates")).stream()
-                            .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
-                            .filter(name -> isXPackTemplate(name) == false)
-                            .collect(Collectors.toSet())
-                    );
-                    Request compReq = new Request("GET", "_component_template");
-                    String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
-                    Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
-                    ((List<?>) cTemplates.get("component_templates")).stream()
+                Request getTemplatesRequest = new Request("GET", "_index_template");
+                Map<String, Object> composableIndexTemplates = XContentHelper.convertToMap(
+                    JsonXContent.jsonXContent,
+                    EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()),
+                    false
+                );
+                unexpectedTemplates.addAll(
+                    ((List<?>) composableIndexTemplates.get("index_templates")).stream()
                         .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
                         .filter(name -> isXPackTemplate(name) == false)
-                        .forEach(unexpectedTemplates::add);
-                }
+                        .collect(Collectors.toSet())
+                );
+                Request compReq = new Request("GET", "_component_template");
+                String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
+                Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
+                ((List<?>) cTemplates.get("component_templates")).stream()
+                    .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
+                    .filter(name -> isXPackTemplate(name) == false)
+                    .forEach(unexpectedTemplates::add);
 
                 if (has(ProductFeature.LEGACY_TEMPLATES)) {
                     Request getLegacyTemplatesRequest = new Request("GET", "_template");
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
index 66c24f157ddfe..cd3406e7ddac5 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java
@@ -55,7 +55,6 @@ class ESRestTestFeatureService implements TestFeatureService {
 
     ESRestTestFeatureService(List<FeatureSpecification> featureSpecs, Set<Version> nodeVersions, Collection<Set<String>> nodeFeatures) {
         List<FeatureSpecification> specs = new ArrayList<>(featureSpecs);
-        specs.add(new RestTestLegacyFeatures());
         if (MetadataHolder.HISTORICAL_FEATURES != null) {
             specs.add(MetadataHolder.HISTORICAL_FEATURES);
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
deleted file mode 100644
index 5a228e2540007..0000000000000
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.test.rest;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.core.UpdateForV9;
-import org.elasticsearch.features.FeatureSpecification;
-import org.elasticsearch.features.NodeFeature;
-
-import java.util.Map;
-
-import static java.util.Map.entry;
-
-/**
- * This class groups historical features that have been removed from the production codebase, but are still used by the test
- * framework to support BwC tests. Rather than leaving them in the main src we group them here, so it's clear they are not used in
- * production code anymore.
- */ -public class RestTestLegacyFeatures implements FeatureSpecification { - public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); - public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); - - // Ref: https://github.com/elastic/elasticsearch/pull/86416 - public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); - - // QA - rolling upgrade tests - public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported"); - public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); - public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); - - public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format"); - public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported"); - - public static final NodeFeature TSDB_DOWNSAMPLING_STABLE = new NodeFeature("indices.tsdb_downsampling_stable"); - - /* - * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template. - * Fixed in #98840 - */ - public static final NodeFeature TSDB_EMPTY_TEMPLATE_FIXED = new NodeFeature("indices.tsdb_empty_composable_template_fixed"); - public static final NodeFeature SYNTHETIC_SOURCE_SUPPORTED = new NodeFeature("indices.synthetic_source"); - - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_SUPPORTED = new NodeFeature("allocator.desired_balance"); - - /* - * Cancel shard allocation command is broken for initial desired balance versions - * and might allocate shard on the node where it is not supposed to be. This - * is fixed by https://github.com/elastic/elasticsearch/pull/93635. 
- */ - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_FIXED = new NodeFeature("allocator.desired_balance_fixed"); - public static final NodeFeature INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED = new NodeFeature("settings.indexing_slowlog_level_removed"); - public static final NodeFeature DEPRECATION_WARNINGS_LEAK_FIXED = new NodeFeature("deprecation_warnings_leak_fixed"); - - // QA - Full cluster restart - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature REPLICATION_OF_CLOSED_INDICES = new NodeFeature("indices.closed_replication_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SOFT_DELETES_ENFORCED = new NodeFeature("indices.soft_deletes_enforced"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_TRANSPORT_COMPRESSED_SETTING = new NodeFeature("transport.new_compressed_setting"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SERVICE_ACCOUNTS_SUPPORTED = new NodeFeature("auth.service_accounts_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature TRANSFORM_SUPPORTED = new NodeFeature("transform.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SLM_SUPPORTED = new NodeFeature("slm.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DATA_STREAMS_SUPPORTED = new NodeFeature("data_stream.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_DATA_STREAMS_INDEX_NAME_FORMAT = new NodeFeature("data_stream.new_index_name_format"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); - - // YAML - public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), - entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), - entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), - entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), - entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), - entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), - entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), - entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), - entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0), - entry(SYNTHETIC_SOURCE_SUPPORTED, Version.V_8_4_0), - entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0), - entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1), - entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0), - entry(TSDB_DOWNSAMPLING_STABLE, Version.V_8_10_0), - entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0), - entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0), - entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9), - entry(REPLICATION_OF_CLOSED_INDICES, Version.V_7_2_0), - entry(SOFT_DELETES_ENFORCED, Version.V_8_0_0), - entry(NEW_TRANSPORT_COMPRESSED_SETTING, Version.V_7_14_0), - entry(SERVICE_ACCOUNTS_SUPPORTED, Version.V_7_13_0), - entry(TRANSFORM_SUPPORTED, Version.V_7_2_0), - entry(SLM_SUPPORTED, Version.V_7_4_0), - entry(DATA_STREAMS_SUPPORTED, Version.V_7_9_0), - entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), - entry(ML_NLP_SUPPORTED, Version.V_8_0_0) - ); - } -} diff --git 
a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 86c3f42a6a8ec..8243dcdc9de94 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -21,7 +21,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; @@ -371,13 +370,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx ? executionContext.getClientYamlTestCandidate().getTestPath() : null; - var fixedProductionHeader = executionContext.clusterHasFeature( - RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id(), - false - ); - if (fixedProductionHeader) { - checkElasticProductHeader(response.getHeaders("X-elastic-product")); - } + checkElasticProductHeader(response.getHeaders("X-elastic-product")); checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { checkResponseException(e, executionContext); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 3cdd968fcc2e7..fe4c1c20c69c4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -23,12 +23,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -292,10 +290,6 @@ public void testWatcherWithApiKey() throws Exception { } public void testServiceAccountApiKey() throws IOException { - @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) - var originalClusterSupportsServiceAccounts = oldClusterHasFeature(RestTestLegacyFeatures.SERVICE_ACCOUNTS_SUPPORTED); - assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); - if (isRunningAgainstOldCluster()) { final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); @@ -481,10 +475,6 @@ public void testRollupAfterRestart() throws Exception { } public void testTransformLegacyTemplateCleanup() throws Exception { - @UpdateForV9(owner = 
UpdateForV9.Owner.MACHINE_LEARNING)
-        var originalClusterSupportsTransform = oldClusterHasFeature(RestTestLegacyFeatures.TRANSFORM_SUPPORTED);
-        assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform);
-
         if (isRunningAgainstOldCluster()) {
 
             // create the source index
@@ -559,9 +549,6 @@ public void testTransformLegacyTemplateCleanup() throws Exception {
     }
 
     public void testSlmPolicyAndStats() throws IOException {
-        @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
-        var originalClusterSupportsSlm = oldClusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED);
-
         SnapshotLifecyclePolicy slmPolicy = new SnapshotLifecyclePolicy(
             "test-policy",
             "test-policy",
@@ -570,7 +557,7 @@ public void testSlmPolicyAndStats() throws IOException {
             Collections.singletonMap("indices", Collections.singletonList("*")),
             null
         );
-        if (isRunningAgainstOldCluster() && originalClusterSupportsSlm) {
+        if (isRunningAgainstOldCluster()) {
             Request createRepoRequest = new Request("PUT", "_snapshot/test-repo");
             String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}";
             createRepoRequest.setJsonEntity(repoCreateJson);
@@ -584,7 +571,7 @@ public void testSlmPolicyAndStats() throws IOException {
             client().performRequest(createSlmPolicyRequest);
         }
 
-        if (isRunningAgainstOldCluster() == false && originalClusterSupportsSlm) {
+        if (isRunningAgainstOldCluster() == false) {
             Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy");
             Response response = client().performRequest(getSlmPolicyRequest);
             Map<String, Object> responseMap = entityAsMap(response);
@@ -911,14 +898,6 @@ private void waitForRollUpJob(final String rollupJob, final Matcher<?> expectedS
 
     @SuppressWarnings("unchecked")
     public void testDataStreams() throws Exception {
-
-        @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
-        var originalClusterSupportsDataStreams = oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED);
-
-        @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT)
-        var originalClusterDataStreamHasDateInIndexName = oldClusterHasFeature(RestTestLegacyFeatures.NEW_DATA_STREAMS_INDEX_NAME_FORMAT);
-
-        assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams);
         if (isRunningAgainstOldCluster()) {
             createComposableTemplate(client(), "dst", "ds");
@@ -955,10 +934,7 @@ public void testDataStreams() throws Exception {
         List<Map<String, Object>> indices = (List<Map<String, Object>>) ds.get("indices");
         assertEquals("ds", ds.get("name"));
         assertEquals(1, indices.size());
-        assertEquals(
-            DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, originalClusterDataStreamHasDateInIndexName),
-            indices.get(0).get("index_name")
-        );
+        assertEquals(DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp), indices.get(0).get("index_name"));
 
         assertNumHits("ds", 1, 1);
     }
diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
index c825de31a7f6e..91820299da8a5 100644
--- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java
@@ -14,7 +14,6 @@ import 
org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; @@ -48,11 +47,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java index 7dc0a2f48bbc9..a83ad5b4f8da4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -64,11 +63,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 0b15e98f201a0..74f62fac26488 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -58,11 +57,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the 
upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 8a775c7f7d3d8..25e8aed73bda2 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.transport.RemoteClusterPortSettings; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -118,20 +117,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { ); RestClient client = client(); - if (isUpdateApiSupported(client)) { - var updateException = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) - ); + var updateException = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) + ); - assertThat( - updateException.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } + assertThat( + updateException.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); } case MIXED -> { try { @@ -145,20 +142,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { // fail when remote_indices are provided: // against old node - if (isUpdateApiSupported(oldVersionClient)) { - Exception e = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) - ); - assertThat( - e.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } - Exception e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); + Exception e = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) + ); + assertThat( + e.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. 
unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); + e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); assertThat( e.getMessage(), anyOf( @@ -263,28 +258,9 @@ private void updateOrBulkUpdateApiKey(String id, String roles) throws IOExceptio updateOrBulkUpdateApiKey(client(), id, roles); } - private boolean isUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY); // Update API was introduced in 8.4.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - - private boolean isBulkUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY); // Bulk update API was introduced in 8.5.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - private void updateOrBulkUpdateApiKey(RestClient client, String id, String roles) throws IOException { - if (false == isUpdateApiSupported(client)) { - return; // Update API is not supported. - } final Request updateApiKeyRequest; - final boolean bulkUpdate = randomBoolean() && isBulkUpdateApiSupported(client); + final boolean bulkUpdate = randomBoolean(); if (bulkUpdate) { updateApiKeyRequest = new Request("POST", "_security/api_key/_bulk_update"); updateApiKeyRequest.setJsonEntity(org.elasticsearch.common.Strings.format(""" diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index 74165eeb07b8a..aa166311f6465 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -83,11 +82,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { - assertNewMemoryFormat("new_memory_format"); - } else { - assertOldMemoryFormat("new_memory_format"); - } + assertNewMemoryFormat("new_memory_format"); } case MIXED -> { ensureHealth(".ml-inference-*,.ml-config*", (request -> { @@ -99,12 +94,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { - assertNewMemoryFormat("new_memory_format"); - } else { - assertOldMemoryFormat("new_memory_format"); - } - + assertNewMemoryFormat("new_memory_format"); } case UPGRADED -> { ensureHealth(".ml-inference-*,.ml-config*", (request -> { @@ -137,14 +127,12 @@ private void waitForDeploymentStarted(String modelId) throws Exception { @SuppressWarnings("unchecked") private void assertOldMemoryFormat(String modelId) 
throws Exception { - // There was a change in the MEMORY_OVERHEAD value in 8.3.0, see #86416 - long memoryOverheadMb = clusterHasFeature(RestTestLegacyFeatures.ML_MEMORY_OVERHEAD_FIXED) ? 240 : 270; var response = getTrainedModelStats(modelId); Map map = entityAsMap(response); List> stats = (List>) map.get("trained_model_stats"); assertThat(stats, hasSize(1)); var stat = stats.get(0); - Long expectedMemoryUsage = ByteSizeValue.ofMb(memoryOverheadMb).getBytes() + RAW_MODEL_SIZE * 2; + Long expectedMemoryUsage = ByteSizeValue.ofMb(240).getBytes() + RAW_MODEL_SIZE * 2; Integer actualMemoryUsage = (Integer) XContentMapValues.extractValue("model_size_stats.required_native_memory_bytes", stat); assertThat( Strings.format("Memory usage mismatch for the model %s in cluster state %s", modelId, CLUSTER_TYPE.toString()), diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index d7d2676163851..0c57baad1a09b 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.rest.RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -43,12 +42,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa public void waitForTemplates() throws Exception { if (AbstractUpgradeTestCase.CLUSTER_TYPE == AbstractUpgradeTestCase.ClusterType.OLD) { try { - boolean clusterUnderstandsComposableTemplates = clusterHasFeature(COMPONENT_TEMPLATE_SUPPORTED); - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterUnderstandsComposableTemplates - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } catch (AssertionError e) { throw new AssertionError("Failure in test setup: Failed to initialize ML index templates", e); } diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java index 31b74b8706877..88b01defb1a86 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.json.JsonXContent; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -34,32 +33,27 @@ private XPackRestTestHelper() {} * @throws InterruptedException If the wait is interrupted */ @SuppressWarnings("unchecked") - public static void waitForTemplates(RestClient client, List expectedTemplates, boolean clusterUnderstandsComposableTemplates) - throws Exception { + public static void waitForTemplates(RestClient client, List expectedTemplates) throws Exception { // TODO: legacy support can be removed once all X-Pack plugins use only composable // templates in the oldest version we test upgrades from assertBusy(() -> { Map response; - if (clusterUnderstandsComposableTemplates) { 
- final Request request = new Request("GET", "_index_template"); - request.addParameter("error_trace", "true"); + final Request request = new Request("GET", "_index_template"); + request.addParameter("error_trace", "true"); - String string = EntityUtils.toString(client.performRequest(request).getEntity()); - List> templateList = (List>) XContentHelper.convertToMap( - JsonXContent.jsonXContent, - string, - false - ).get("index_templates"); - response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template"))); - } else { - response = Collections.emptyMap(); - } + String string = EntityUtils.toString(client.performRequest(request).getEntity()); + List> templateList = (List>) XContentHelper.convertToMap( + JsonXContent.jsonXContent, + string, + false + ).get("index_templates"); + response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template"))); final Set templates = new TreeSet<>(response.keySet()); final Request legacyRequest = new Request("GET", "_template"); legacyRequest.addParameter("error_trace", "true"); - String string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity()); + string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity()); Map legacyResponse = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false); final Set legacyTemplates = new TreeSet<>(legacyResponse.keySet()); From c699af2c67ca302e39ee8b1dbd14f1cc50470017 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 19 Nov 2024 18:27:05 +0100 Subject: [PATCH 047/386] [DOCS] Rename how-to subsection, move recipes to search relevance (#117044) --- docs/reference/how-to.asciidoc | 26 +++++++++---------- docs/reference/how-to/recipes.asciidoc | 4 +-- .../reference/how-to/recipes/scoring.asciidoc | 5 ++-- .../search-your-data.asciidoc | 4 ++- 4 files changed, 20 insertions(+), 19 deletions(-) diff --git a/docs/reference/how-to.asciidoc b/docs/reference/how-to.asciidoc index ec514eb05be29..eeac2fe5c2f50 100644 --- a/docs/reference/how-to.asciidoc +++ b/docs/reference/how-to.asciidoc @@ -1,23 +1,21 @@ [[how-to]] -= How to += Optimizations -[partintro] --- -Elasticsearch ships with defaults which are intended to give a good out of -the box experience. Full text search, highlighting, aggregations, and indexing -should all just work without the user having to change anything. +Elasticsearch's default settings provide a good out-of-box experience for basic operations like full text search, highlighting, aggregations, and indexing. -Once you better understand how you want to use Elasticsearch, however, -there are a number of optimizations you can make to improve performance -for your use case. +However, there are a number of optimizations you can make to improve performance for your use case. -This section provides guidance about which changes should and shouldn't be -made. --- +This section provides recommendations for various use cases. 
-include::how-to/general.asciidoc[] +* <> +* <> +* <> +* <> +* <> +* <> +* <> -include::how-to/recipes.asciidoc[] +include::how-to/general.asciidoc[] include::how-to/indexing-speed.asciidoc[] diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc index b46f624aef51d..de23404be6164 100644 --- a/docs/reference/how-to/recipes.asciidoc +++ b/docs/reference/how-to/recipes.asciidoc @@ -1,7 +1,7 @@ [[recipes]] -== Recipes +== Search relevance optimizations -This section includes a few recipes to help with common problems: +This section includes a few recipes to help with common search relevance issues: * <> * <> diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 5c5a8977d34d4..a578826e31fac 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -88,8 +88,9 @@ pages independently of the query. There are two main queries that allow combining static score contributions with textual relevance, eg. as computed with BM25: - - <> - - <> + +* <> +* <> For instance imagine that you have a `pagerank` field that you wish to combine with the BM25 score so that the final score is equal to diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index 82541412db4bd..9ef1ae0ebc59b 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,12 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. include::search-api.asciidoc[] +include::../../how-to/recipes.asciidoc[] +// ☝️ search relevance recipes include::retrievers-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] include::search-application-overview.asciidoc[] -include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] +include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] \ No newline at end of file From 5124aefe08847f9742eadac0723421eedd1e0ac1 Mon Sep 17 00:00:00 2001 From: Pawan Kartik Date: Tue, 19 Nov 2024 18:39:56 +0000 Subject: [PATCH 048/386] Add tests for RCS2:ES|QL to verify behaviour for disconnected clusters (#116847) * Add tests for RCS2:ES|QL to verify behaviour for disconnected clusters * Address some review comments --- ...ssClusterEsqlRCS2UnavailableRemotesIT.java | 316 ++++++++++++++++++ 1 file changed, 316 insertions(+) create mode 100644 x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java new file mode 100644 index 0000000000000..52cd0655fbfdf --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java @@ -0,0 +1,316 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; + +public class CrossClusterEsqlRCS2UnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase { + private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); + + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .nodes(1) + .module("x-pack-esql") + .apply(commonClusterConfig) + .setting("remote_cluster.port", "0") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_server.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key") + .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password") + .node(0, spec -> spec.setting("remote_cluster_server.enabled", "true")) + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .module("x-pack-esql") + .apply(commonClusterConfig) + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_client.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("cluster.remote.my_remote_cluster.credentials", () -> { + if (API_KEY_MAP_REF.get() == null) { + final Map apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + API_KEY_MAP_REF.set(apiKeyMap); + } + return (String) API_KEY_MAP_REF.get().get("encoded"); + }) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(REMOTE_METRIC_USER, PASS.toString(), "read_remote_shared_metrics", false) + .build(); + } + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + @Before + public void setupPreRequisites() throws Exception { + setupRolesAndPrivileges(); + loadData(); + } + + public void testEsqlRcs2UnavailableRemoteScenarios() throws Exception { + clusterShutDownWithRandomSkipUnavailable(); + remoteClusterShutdownWithSkipUnavailableTrue(); + remoteClusterShutdownWithSkipUnavailableFalse(); + } + + private void clusterShutDownWithRandomSkipUnavailable() throws Exception { + // skip_unavailable is set to a random boolean value. + // However, no clusters are stopped. Hence, we do not expect any other behaviour + // other than a 200-OK. 
+ + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), randomBoolean()); + String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + Response response = performRequestWithRemoteSearchUser(esqlRequest(query)); + + Map map = responseAsMap(response); + ArrayList columns = (ArrayList) map.get("columns"); + ArrayList values = (ArrayList) map.get("values"); + Map clusters = (Map) map.get("_clusters"); + Map clusterDetails = (Map) clusters.get("details"); + Map localClusterDetails = (Map) clusterDetails.get("(local)"); + Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + + assertOK(response); + assertThat((int) map.get("took"), greaterThan(0)); + assertThat(columns.size(), is(4)); + assertThat(values.size(), is(9)); + + assertThat((int) clusters.get("total"), is(2)); + assertThat((int) clusters.get("successful"), is(2)); + assertThat((int) clusters.get("running"), is(0)); + assertThat((int) clusters.get("skipped"), is(0)); + assertThat((int) clusters.get("partial"), is(0)); + assertThat((int) clusters.get("failed"), is(0)); + + assertThat(clusterDetails.size(), is(2)); + assertThat((int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(localClusterDetails.get("status"), is("successful")); + + assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(remoteClusterDetails.get("status"), is("successful")); + } + + @SuppressWarnings("unchecked") + private void remoteClusterShutdownWithSkipUnavailableTrue() throws Exception { + // Remote cluster is stopped and skip unavailable is set to true. + // We expect no exception and partial results from the remaining open cluster. + + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), true); + + try { + // Stop remote cluster. + fulfillingCluster.stop(true); + + // A simple query that targets our remote cluster. + String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + Response response = performRequestWithRemoteSearchUser(esqlRequest(query)); + + Map map = responseAsMap(response); + ArrayList columns = (ArrayList) map.get("columns"); + ArrayList values = (ArrayList) map.get("values"); + Map clusters = (Map) map.get("_clusters"); + Map clusterDetails = (Map) clusters.get("details"); + Map localClusterDetails = (Map) clusterDetails.get("(local)"); + Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + + // Assert results obtained from the local cluster and that remote cluster was + // skipped. 
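+            // For illustration, an abridged sketch of the `_clusters` metadata these
+            // assertions expect (field names from the response, values as asserted below):
+            //   "_clusters": { "total": 2, "successful": 1, "skipped": 1, "details": {
+            //       "(local)":           { "status": "successful" },
+            //       "my_remote_cluster": { "status": "skipped", "failures": [ ... ] } } }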
+ assertOK(response); + assertThat((int) map.get("took"), greaterThan(0)); + + assertThat(columns.size(), is(2)); + assertThat(values.size(), is(5)); + + assertThat((int) clusters.get("total"), is(2)); + assertThat((int) clusters.get("successful"), is(1)); + assertThat((int) clusters.get("skipped"), is(1)); + assertThat((int) clusters.get("running"), is(0)); + assertThat((int) clusters.get("partial"), is(0)); + assertThat((int) clusters.get("failed"), is(0)); + + assertThat(clusterDetails.size(), is(2)); + assertThat((int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(localClusterDetails.get("status"), is("successful")); + + assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(remoteClusterDetails.get("status"), is("skipped")); + + ArrayList remoteClusterFailures = (ArrayList) remoteClusterDetails.get("failures"); + assertThat(remoteClusterFailures.size(), equalTo(1)); + Map failuresMap = (Map) remoteClusterFailures.get(0); + + Map reason = (Map) failuresMap.get("reason"); + assertThat(reason.get("type").toString(), equalTo("connect_transport_exception")); + assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]")); + } finally { + fulfillingCluster.start(); + closeFulfillingClusterClient(); + initFulfillingClusterClient(); + } + } + + private void remoteClusterShutdownWithSkipUnavailableFalse() throws Exception { + // Remote cluster is stopped and skip_unavailable is set to false. + // Although the other cluster is open, we expect an Exception. + + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), false); + + try { + // Stop remote cluster. + fulfillingCluster.stop(true); + + // A simple query that targets our remote cluster. 
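+            // Unlike the skip_unavailable=true case above, the remote is treated as required
+            // here, so no partial result is returned and the request as a whole must fail.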
+ String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); + assertThat(ex.getMessage(), containsString("connect_transport_exception")); + } finally { + fulfillingCluster.start(); + closeFulfillingClusterClient(); + initFulfillingClusterClient(); + } + } + + private void setupRolesAndPrivileges() throws IOException { + var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + + var putRoleOnRemoteClusterRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleOnRemoteClusterRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"] + } + ], + "remote_indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"], + "clusters": ["*"] + } + ] + }"""); + assertOK(adminClient().performRequest(putRoleOnRemoteClusterRequest)); + } + + private void loadData() throws IOException { + Request createIndex = new Request("PUT", "task"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "id": { "type": "integer" }, + "time_taken_millis": { "type": "integer" } + } + } + } + """); + assertOK(client().performRequest(createIndex)); + + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": { "_index": "task" } } + { "id": 1, "time_taken_millis": 39} + { "index": { "_index": "task" } } + { "id": 2, "time_taken_millis": 25} + { "index": { "_index": "task" } } + { "id": 3, "time_taken_millis": 42} + { "index": { "_index": "task" } } + { "id": 4, "time_taken_millis": 16} + { "index": { "_index": "task" } } + { "id": 5, "time_taken_millis": 62} + """); + assertOK(client().performRequest(bulkRequest)); + + createIndex = new Request("PUT", "hits"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "endpoint_id": { "type": "integer" }, + "t_hits": { "type": "integer" } + } + } + } + """); + assertOK(performRequestAgainstFulfillingCluster(createIndex)); + + bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": {"_index": "hits"}} + { "endpoint_id": 1, "t_hits": 1267 } + { "index": {"_index": "hits"}} + { "endpoint_id": 2, "t_hits": 1389 } + { "index": {"_index": "hits"}} + { "endpoint_id": 3, "t_hits": 1922 } + { "index": {"_index": "hits"}} + { "endpoint_id": 4, "t_hits": 1547 } + """); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + + private Request esqlRequest(String query) throws IOException { + XContentBuilder body = JsonXContent.contentBuilder(); + + body.startObject(); + body.field("query", query); + body.field("include_ccs_metadata", true); + body.endObject(); + + Request request = new Request("POST", "_query"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); + + return request; + } +} From bcd690f853332a88ae5dff4ad0769e9ff0f6fb11 Mon Sep 
17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Nov 2024 11:19:36 -0800 Subject: [PATCH 049/386] Update forbidden apis to 3.8 (#117047) --- gradle/build.versions.toml | 2 +- gradle/verification-metadata.xml | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 1bdd93e3a7470..05fda8e0244de 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -16,7 +16,7 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" -forbiddenApis = "de.thetaphi:forbiddenapis:3.6" +forbiddenApis = "de.thetaphi:forbiddenapis:3.8" gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.18.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5e874b52fc4c6..3f56071f6f495 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1336,6 +1336,11 @@ + + + + + From 04ca5059527b0cefcfb8416fd944dd9372a758d7 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 19 Nov 2024 21:07:54 +0100 Subject: [PATCH 050/386] Fix LogsDB plugin initialisation of `cluster.logsdb.enabled` (#116825) --- .../org/elasticsearch/datastreams/AbstractDataStreamIT.java | 1 + .../java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java index 01c63be448e62..2dc6fd84fdfe7 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java @@ -45,6 +45,7 @@ public abstract class AbstractDataStreamIT extends ESRestTestCase { // tests such as testIgnoreDynamicBeyondLimit. .setting("xpack.apm_data.enabled", "false") .setting("xpack.otel_data.registry.enabled", "false") + .setting("cluster.logsdb.enabled", "false") .build(); protected RestClient client; diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 0eb0754985c94..93ba126e4196f 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -48,6 +48,11 @@ public LogsDBPlugin(Settings settings) { public Collection createComponents(PluginServices services) { licenseService.setLicenseState(XPackPlugin.getSharedLicenseState()); var clusterSettings = services.clusterService().getClusterSettings(); + // The `cluster.logsdb.enabled` setting is registered by this plugin, but its value may be updated by other plugins + // before this plugin registers its settings update consumer below. This means we might miss updates that occurred earlier. + // To handle this, we explicitly fetch the current `cluster.logsdb.enabled` setting value from the cluster settings + // and update it, ensuring we capture any prior changes. 
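+        // (A read-then-subscribe pattern: seed from the current value first, then rely on
+        // the update consumer registered below for any subsequent changes.)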
+ logsdbIndexModeSettingsProvider.updateClusterIndexModeLogsdbEnabled(clusterSettings.get(CLUSTER_LOGSDB_ENABLED)); clusterSettings.addSettingsUpdateConsumer(FALLBACK_SETTING, licenseService::setSyntheticSourceFallback); clusterSettings.addSettingsUpdateConsumer( CLUSTER_LOGSDB_ENABLED, From 1e3398fb043f1b0a570c525a21df60c2f37e532b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 19 Nov 2024 12:16:43 -0800 Subject: [PATCH 051/386] Make IDE setup lenient to support composite builds (#117052) We had originally made this strict to ensure refactored projects would result in an error but this doesn't work well for composite builds so we're making this lenient again. --- build-tools-internal/src/main/groovy/elasticsearch.ide.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 5640409e0ff44..431d51d6c1275 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -145,7 +145,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { doLast { ['main', 'test'].each { sourceSet -> - modifyXml(".idea/modules/libs/native/elasticsearch.libs.${project.project(':libs:native').name}.${sourceSet}.iml") { xml -> + modifyXml(".idea/modules/libs/native/elasticsearch.libs.native.${sourceSet}.iml") { xml -> xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW' } } From f9c5bc0b069acd194cdf74a4fc1e81daf1fcd31f Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Tue, 19 Nov 2024 15:18:31 -0500 Subject: [PATCH 052/386] Remove legacy params from range query (#116970) Remove to, from, include_lower, include_upper range query params. These params have been removed from our documentation in v. 0.90.4 (d6ecdec), and got deprecated in 8.16 in #113286. --- docs/changelog/116970.yaml | 11 ++++++++ modules/rank-eval/build.gradle | 4 --- modules/runtime-fields-common/build.gradle | 4 --- muted-tests.yml | 3 --- rest-api-spec/build.gradle | 1 + .../test/search/500_date_range.yml | 26 ------------------- .../index/query/RangeQueryBuilder.java | 14 +--------- x-pack/plugin/build.gradle | 1 - 8 files changed, 13 insertions(+), 51 deletions(-) create mode 100644 docs/changelog/116970.yaml diff --git a/docs/changelog/116970.yaml b/docs/changelog/116970.yaml new file mode 100644 index 0000000000000..66de673dfb53c --- /dev/null +++ b/docs/changelog/116970.yaml @@ -0,0 +1,11 @@ +pr: 116970 +summary: Remove legacy params from range query +area: Search +type: breaking +issues: [] +breaking: + title: Remove legacy params from range query + area: REST API + details: The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + impact: Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. 
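+  # A hypothetical migration example (parameter values borrowed from the REST test
+  # removed later in this patch): a query previously written as
+  #   {"range": {"date": {"from": 1000, "to": 2023, "include_lower": false, "include_upper": false}}}
+  # is now expressed as
+  #   {"range": {"date": {"gt": 1000, "lt": 2023}}}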
+ notable: false diff --git a/modules/rank-eval/build.gradle b/modules/rank-eval/build.gradle index 511dd7be9ae68..c9016798c18b9 100644 --- a/modules/rank-eval/build.gradle +++ b/modules/rank-eval/build.gradle @@ -25,7 +25,3 @@ testClusters.configureEach { // Modules who's integration is explicitly tested in integration tests module ':modules:lang-mustache' } - -tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTest("rank_eval/30_failures/Response format", "warning does not exist for compatibility") -}) diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle index e743939cbf79e..e8e06f0a9c4c7 100644 --- a/modules/runtime-fields-common/build.gradle +++ b/modules/runtime-fields-common/build.gradle @@ -22,7 +22,3 @@ dependencies { api project(':libs:grok') api project(':libs:dissect') } - -tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTestsByFilePattern("**/runtime_fields/110_composite.yml", "warning does not exist for compatibility") -}) diff --git a/muted-tests.yml b/muted-tests.yml index 7083767d0451e..bff2727c56e67 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -67,9 +67,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item} issue: https://github.com/elastic/elasticsearch/issues/113325 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated} - issue: https://github.com/elastic/elasticsearch/pull/113286 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests method: testFieldMappings issue: https://github.com/elastic/elasticsearch/issues/113592 diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 8e1df37804708..439960228cef6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -59,4 +59,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") + task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml index e9bfffb8da604..76057b5a364fb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml @@ -123,29 +123,3 @@ setup: - match: { hits.total: 1 } - length: { hits.hits: 1 } - match: { hits.hits.0._id: "4" } - - ---- -"from, to, include_lower, include_upper deprecated": - - requires: - cluster_features: "gte_v8.16.0" - reason: 'from, to, include_lower, include_upper parameters are deprecated since 8.16.0' - test_runner_features: warnings - - - do: - warnings: - - "Deprecated field [from] used, this field is unused and will be removed entirely" - - "Deprecated field [to] used, this field is unused and will be removed entirely" - - "Deprecated field 
[include_lower] used, this field is unused and will be removed entirely" - - "Deprecated field [include_upper] used, this field is unused and will be removed entirely" - search: - index: dates - body: - sort: field - query: - range: - date: - from: 1000 - to: 2023 - include_lower: false - include_upper: false diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index f1081d06d649d..9f6a2be8cdbc7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -45,10 +45,6 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i public static final ParseField LTE_FIELD = new ParseField("lte"); public static final ParseField GTE_FIELD = new ParseField("gte"); - public static final ParseField FROM_FIELD = new ParseField("from").withAllDeprecated(); - public static final ParseField TO_FIELD = new ParseField("to").withAllDeprecated(); - private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower").withAllDeprecated(); - private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper").withAllDeprecated(); public static final ParseField GT_FIELD = new ParseField("gt"); public static final ParseField LT_FIELD = new ParseField("lt"); private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); @@ -367,15 +363,7 @@ public static RangeQueryBuilder fromXContent(XContentParser parser) throws IOExc if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { - if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - from = maybeConvertToBytesRef(parser.objectBytes()); - } else if (TO_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - to = maybeConvertToBytesRef(parser.objectBytes()); - } else if (INCLUDE_LOWER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeLower = parser.booleanValue(); - } else if (INCLUDE_UPPER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeUpper = parser.booleanValue(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { boost = parser.floatValue(); } else if (GT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from = maybeConvertToBytesRef(parser.objectBytes()); diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index b13f7903bc8b5..48b1d478ddf94 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -81,7 +81,6 @@ tasks.named("precommit").configure { } tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. 
That's safe.") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry) snapshot version", "The number of functions is constantly increasing") From 2416c63551458cac70575722667c9bfc743c8b1d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 19 Nov 2024 15:24:49 -0500 Subject: [PATCH 053/386] add missing capabilities checks (#117072) This should fix a couple of tests failures due to incorrect capabilities checks. I already added them manually to 8.x to resolve a failure there (see #116602) so this doesn't need to be backported. --- .../qa/testFixtures/src/main/resources/union_types.csv-spec | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index a2fd3f3d5e0da..0f4c3ef6ef9e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -734,6 +734,8 @@ count:long | @timestamp:date Multi Index millis to nanos stats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos +required_capability: date_trunc_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATE_NANOS(@timestamp)) From c3ea73f51ad2fd3f58e483c712de6f7a256e876e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:08:43 +1100 Subject: [PATCH 054/386] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} #117082 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index bff2727c56e67..75345f61bf5b1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -242,6 +242,9 @@ tests: - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116990 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} + issue: https://github.com/elastic/elasticsearch/issues/117082 # Examples: # From ad13a7ffc815c9d994d3dd8b9eba2d8d145963f7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:23:04 +1100 Subject: [PATCH 055/386] Mute org.elasticsearch.upgrades.FullClusterRestartDownsampleIT testRollupIndex {cluster=OLD} #117084 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 75345f61bf5b1..0301720b508e9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -245,6 +245,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} issue: https://github.com/elastic/elasticsearch/issues/117082 +- class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT + method: testRollupIndex {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/117084 # Examples: # From de219fc91a84ad0f47b1d30f6807b0b6c6890757 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 
08:23:43 +1100 Subject: [PATCH 056/386] Mute org.elasticsearch.upgrades.FullClusterRestartDownsampleIT testRollupIndex {cluster=UPGRADED} #117086 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 0301720b508e9..fb3a0ac76ca22 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -248,6 +248,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT method: testRollupIndex {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/117084 +- class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT + method: testRollupIndex {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/117086 # Examples: # From 0b74492a30f3e9c5bd366fa2c82fa637c87f5974 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 19 Nov 2024 22:24:04 +0100 Subject: [PATCH 057/386] ESQL: Refactor named writeable entries for expressions and plans (#117029) Move writable declarations outside the core classes to avoid errors (such as subClass.getNamedWritable()) and centralize them in a top package class for better management. Make all touched serialization code the same as in the initial PR, except for the fact that LookupJoin/LookupJoinExec are missing. Co-authored-by: Costin Leau --- .../xpack/esql/core/expression/Attribute.java | 6 - .../esql/core/expression/Expression.java | 10 - .../expression/ExpressionCoreWritables.java | 48 ++++ .../esql/core/expression/NamedExpression.java | 10 - .../predicate/fulltext/FullTextPredicate.java | 5 - .../org/elasticsearch/compute/data/Block.java | 16 -- .../compute/data/BlockWritables.java | 28 +++ .../compute/data/SerializationTestCase.java | 2 +- .../exchange/ExchangeServiceTests.java | 4 +- .../esql/expression/ExpressionWritables.java | 213 ++++++++++++++++++ .../function/aggregate/AggregateFunction.java | 22 -- .../aggregate/AggregateWritables.java | 37 +++ .../function/aggregate/package-info.java | 2 +- .../function/fulltext/FullTextFunction.java | 4 - .../function/fulltext/FullTextWritables.java | 28 +++ .../function/grouping/GroupingWritables.java | 19 ++ .../function/scalar/EsqlScalarFunction.java | 96 -------- .../scalar/ScalarFunctionWritables.java | 99 ++++++++ .../function/scalar/UnaryScalarFunction.java | 110 --------- .../AbstractMultivalueFunction.java | 23 -- .../multivalue/MvFunctionWritables.java | 36 +++ .../function/scalar/package-info.java | 2 +- .../scalar/spatial/BinarySpatialFunction.java | 5 - .../arithmetic/EsqlArithmeticOperation.java | 5 - .../comparison/EsqlBinaryComparison.java | 5 - .../xpack/esql/plan/PlanWritables.java | 115 ++++++++++ .../xpack/esql/plan/logical/LogicalPlan.java | 28 --- .../esql/plan/physical/PhysicalPlan.java | 29 --- .../xpack/esql/plugin/EsqlPlugin.java | 29 +-- .../xpack/esql/SerializationTestUtils.java | 24 +- .../action/EsqlQueryResponseProfileTests.java | 4 +- .../esql/action/EsqlQueryResponseTests.java | 3 +- .../AbstractExpressionSerializationTests.java | 21 +- .../xpack/esql/expression/AliasTests.java | 11 +- .../function/AbstractAttributeTestCase.java | 7 +- .../esql/io/stream/PlanStreamOutputTests.java | 8 +- ...AbstractLogicalPlanSerializationTests.java | 18 +- .../logical/local/LocalSupplierTests.java | 3 +- ...bstractPhysicalPlanSerializationTests.java | 20 +- .../ConfigurationSerializationTests.java | 4 +- .../esql/type/MultiTypeEsFieldTests.java | 8 +- 41 files changed, 674 insertions(+), 493 deletions(-) create mode 100644 
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 45f42a754910d..53debedafc3d8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -34,11 +33,6 @@ public abstract class Attribute extends NamedExpression { */ protected static final String SYNTHETIC_ATTRIBUTE_NAME_PREFIX = "$$"; - public static List getNamedWriteables() { - // TODO add UnsupportedAttribute when these are moved to the same project - return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); - } - // can the attr be null - typically used in JOINs private final Nullability nullability; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java index 0be7f65d767c7..00765a8c0528c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; @@ -15,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -29,14 +27,6 @@ * (which is a type of expression) with a single child, c. 
*/ public abstract class Expression extends Node implements Resolvable { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : NamedExpression.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Literal.ENTRY); - return entries; - } public static class TypeResolution { private final boolean failed; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java new file mode 100644 index 0000000000000..174a0321a3057 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionCoreWritables { + + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List expressions() { + List entries = new ArrayList<>(); + // add entries as expressions + for (NamedWriteableRegistry.Entry e : namedExpressions()) { + entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (Expression) e.reader.read(in))); + } + entries.add(Literal.ENTRY); + return entries; + } + + public static List namedExpressions() { + List entries = new ArrayList<>(); + // add entries as named writeables + for (NamedWriteableRegistry.Entry e : attributes()) { + entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); + } + entries.add(Alias.ENTRY); + return entries; + } + + public static List attributes() { + return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index 3b018f09e5ebd..f425fc7110a41 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -7,11 +7,9 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; -import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -20,14 +18,6 @@ * (by converting to an attribute). 
*/ public abstract class NamedExpression extends Expression implements NamedWriteable { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : Attribute.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Alias.ENTRY); - return entries; - } private final String name; private final NameId id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java index 29a567e83211d..b23593804f8fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -23,10 +22,6 @@ public abstract class FullTextPredicate extends Expression { - public static List getNamedWriteables() { - return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, StringQueryPredicate.ENTRY); - } - public enum Operator { AND, OR; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 9e48bc13cdafa..1e06cf1ea4450 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -18,8 +17,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; -import java.util.List; - /** * A Block is a columnar representation of homogenous data. It has a position (row) count, and * various data retrieval methods for accessing the underlying data that is stored at a given @@ -291,19 +288,6 @@ static Block[] buildAll(Block.Builder... builders) { } } - static List getNamedWriteables() { - return List.of( - IntBlock.ENTRY, - LongBlock.ENTRY, - FloatBlock.ENTRY, - DoubleBlock.ENTRY, - BytesRefBlock.ENTRY, - BooleanBlock.ENTRY, - ConstantNullBlock.ENTRY, - CompositeBlock.ENTRY - ); - } - /** * Serialization type for blocks: 0 and 1 replace false/true used in pre-8.14 */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java new file mode 100644 index 0000000000000..ff9139e57e52e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class BlockWritables { + + public static List getNamedWriteables() { + return List.of( + IntBlock.ENTRY, + LongBlock.ENTRY, + FloatBlock.ENTRY, + DoubleBlock.ENTRY, + BytesRefBlock.ENTRY, + BooleanBlock.ENTRY, + ConstantNullBlock.ENTRY, + CompositeBlock.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index e72c34fdb5f7a..d76e58d1c8a30 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -29,7 +29,7 @@ public abstract class SerializationTestCase extends ESTestCase { BigArrays bigArrays; protected BlockFactory blockFactory; - NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); + NamedWriteableRegistry registry = new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); @Before public final void newBlockFactory() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 9e07f9c8f5faf..0b1ecce8c375b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; @@ -457,7 +457,7 @@ public void sendResponse(TransportResponse transportResponse) { private MockTransportService newTransportService() { List namedWriteables = new ArrayList<>(ClusterModule.getNamedWriteables()); - namedWriteables.addAll(Block.getNamedWriteables()); + namedWriteables.addAll(BlockWritables.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java new file mode 100644 index 0000000000000..7e2de0094c2ab --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.ExpressionCoreWritables; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables; +import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionWritables { + + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + + entries.addAll(allExpressions()); + entries.addAll(aggregates()); + entries.addAll(scalars()); + entries.addAll(spatials()); + entries.addAll(arithmetics()); + entries.addAll(binaryComparisons()); + entries.addAll(fullText()); + entries.addAll(unaryScalars()); + return entries; + } + + public static List attributes() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.attributes()); + entries.add(UnsupportedAttribute.ENTRY); + return entries; + } + + public static List namedExpressions() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.namedExpressions()); + entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); + return entries; + } + + public static List expressions() { + List entries = new ArrayList<>(); + 
entries.addAll(ExpressionCoreWritables.expressions()); + entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); + entries.add(Order.ENTRY); + return entries; + } + + public static List allExpressions() { + List entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List aggregates() { + return AggregateWritables.getNamedWriteables(); + } + + public static List scalars() { + return ScalarFunctionWritables.getNamedWriteables(); + } + + public static List unaryScalars() { + List entries = new ArrayList<>(); + entries.add(Abs.ENTRY); + entries.add(Acos.ENTRY); + entries.add(Asin.ENTRY); + entries.add(Atan.ENTRY); + entries.add(ByteLength.ENTRY); + entries.add(Cbrt.ENTRY); + entries.add(Ceil.ENTRY); + entries.add(Cos.ENTRY); + entries.add(Cosh.ENTRY); + entries.add(Exp.ENTRY); + entries.add(Floor.ENTRY); + entries.add(FromBase64.ENTRY); + entries.add(IsNotNull.ENTRY); + entries.add(IsNull.ENTRY); + entries.add(Length.ENTRY); + entries.add(Log10.ENTRY); + entries.add(LTrim.ENTRY); + entries.add(Neg.ENTRY); + entries.add(Not.ENTRY); + entries.add(RLike.ENTRY); + entries.add(RTrim.ENTRY); + entries.add(Signum.ENTRY); + entries.add(Sin.ENTRY); + entries.add(Sinh.ENTRY); + entries.add(Space.ENTRY); + entries.add(Sqrt.ENTRY); + entries.add(StX.ENTRY); + entries.add(StY.ENTRY); + entries.add(Tan.ENTRY); + entries.add(Tanh.ENTRY); + entries.add(ToBase64.ENTRY); + entries.add(ToBoolean.ENTRY); + entries.add(ToCartesianPoint.ENTRY); + entries.add(ToDatetime.ENTRY); + entries.add(ToDateNanos.ENTRY); + entries.add(ToDegrees.ENTRY); + entries.add(ToDouble.ENTRY); + entries.add(ToGeoShape.ENTRY); + entries.add(ToCartesianShape.ENTRY); + entries.add(ToGeoPoint.ENTRY); + entries.add(ToIP.ENTRY); + entries.add(ToInteger.ENTRY); + entries.add(ToLong.ENTRY); + entries.add(ToRadians.ENTRY); + entries.add(ToString.ENTRY); + entries.add(ToUnsignedLong.ENTRY); + entries.add(ToVersion.ENTRY); + entries.add(Trim.ENTRY); + entries.add(WildcardLike.ENTRY); + entries.add(Delay.ENTRY); + // mv functions + entries.addAll(MvFunctionWritables.getNamedWriteables()); + return entries; + } + + private static List spatials() { + return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY); + } + + private static List arithmetics() { + return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY); + } + + private static List binaryComparisons() { + return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY); + } + + private static List fullText() { + return FullTextWritables.getNamedWriteables(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index f7a74cc2ae93f..87efccfc90ab3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -30,27 +29,6 @@ * A type of {@code Function} that takes multiple values and extracts a single value out of them. For example, {@code AVG()}. */ public abstract class AggregateFunction extends Function { - public static List getNamedWriteables() { - return List.of( - Avg.ENTRY, - Count.ENTRY, - CountDistinct.ENTRY, - Max.ENTRY, - Median.ENTRY, - MedianAbsoluteDeviation.ENTRY, - Min.ENTRY, - Percentile.ENTRY, - Rate.ENTRY, - SpatialCentroid.ENTRY, - Sum.ENTRY, - Top.ENTRY, - Values.ENTRY, - // internal functions - ToPartial.ENTRY, - FromPartial.ENTRY, - WeightedAvg.ENTRY - ); - } private final Expression field; private final List parameters; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java new file mode 100644 index 0000000000000..b9cfd8892dd69 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class AggregateWritables { + + public static List getNamedWriteables() { + return List.of( + Avg.ENTRY, + Count.ENTRY, + CountDistinct.ENTRY, + Max.ENTRY, + Median.ENTRY, + MedianAbsoluteDeviation.ENTRY, + Min.ENTRY, + Percentile.ENTRY, + Rate.ENTRY, + SpatialCentroid.ENTRY, + Sum.ENTRY, + Top.ENTRY, + Values.ENTRY, + // internal functions + ToPartial.ENTRY, + FromPartial.ENTRY, + WeightedAvg.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java index 4106df331d101..9f08401a42dd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java @@ -94,7 +94,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and add that constant to the list in - * {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction#getNamedWriteables}. + * {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables#getNamedWriteables}. * *
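// [Editor's sketch -- not part of the patch] The javadoc above says to add a
// NamedWriteableRegistry.Entry constant and list it in
// AggregateWritables#getNamedWriteables. For a hypothetical aggregate function
// "MyAgg" (an illustrative assumption; the Entry shape follows the real
// registrations in this patch, e.g. RegexpQueryBuilder's entry in
// SerializationTestUtils), the constant in MyAgg.java would look like:
public static final NamedWriteableRegistry.Entry ENTRY =
    new NamedWriteableRegistry.Entry(Expression.class, "MyAgg", MyAgg::new);
// ...then append MyAgg.ENTRY to the List.of(...) in
// AggregateWritables#getNamedWriteables shown above, so registries built from
// that holder can resolve the function when deserializing plans.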

  • * Do the same with {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 1a3667de992cd..9addf08e1b5f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -29,9 +28,6 @@ * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries. */ public abstract class FullTextFunction extends Function { - public static List getNamedWriteables() { - return List.of(QueryString.ENTRY, Match.ENTRY); - } private final Expression query; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java new file mode 100644 index 0000000000000..7fdfb4b328869 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; + +import java.util.List; + +public class FullTextWritables { + + public static List getNamedWriteables() { + return List.of( + MatchQueryPredicate.ENTRY, + MultiMatchQueryPredicate.ENTRY, + StringQueryPredicate.ENTRY, + QueryString.ENTRY, + Match.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java new file mode 100644 index 0000000000000..89b9036e97e3a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.grouping; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class GroupingWritables { + + public static List getNamedWriteables() { + return List.of(Bucket.ENTRY, Categorize.ENTRY); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 65985f234ac92..404ce7e3900c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -7,57 +7,11 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; -import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; -import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; -import java.util.ArrayList; import java.util.List; /** @@ -71,56 +25,6 @@ *

    */ public abstract class EsqlScalarFunction extends ScalarFunction implements EvaluatorMapper { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - entries.add(And.ENTRY); - entries.add(Atan2.ENTRY); - entries.add(BitLength.ENTRY); - entries.add(Bucket.ENTRY); - entries.add(Case.ENTRY); - entries.add(Categorize.ENTRY); - entries.add(CIDRMatch.ENTRY); - entries.add(Coalesce.ENTRY); - entries.add(Concat.ENTRY); - entries.add(E.ENTRY); - entries.add(EndsWith.ENTRY); - entries.add(Greatest.ENTRY); - entries.add(Hypot.ENTRY); - entries.add(In.ENTRY); - entries.add(InsensitiveEquals.ENTRY); - entries.add(DateExtract.ENTRY); - entries.add(DateDiff.ENTRY); - entries.add(DateFormat.ENTRY); - entries.add(DateParse.ENTRY); - entries.add(DateTrunc.ENTRY); - entries.add(IpPrefix.ENTRY); - entries.add(Least.ENTRY); - entries.add(Left.ENTRY); - entries.add(Locate.ENTRY); - entries.add(Log.ENTRY); - entries.add(Now.ENTRY); - entries.add(Or.ENTRY); - entries.add(Pi.ENTRY); - entries.add(Pow.ENTRY); - entries.add(Right.ENTRY); - entries.add(Repeat.ENTRY); - entries.add(Replace.ENTRY); - entries.add(Reverse.ENTRY); - entries.add(Round.ENTRY); - entries.add(Split.ENTRY); - entries.add(Substring.ENTRY); - entries.add(StartsWith.ENTRY); - entries.add(Tau.ENTRY); - entries.add(ToLower.ENTRY); - entries.add(ToUpper.ENTRY); - entries.addAll(BinarySpatialFunction.getNamedWriteables()); - entries.addAll(EsqlArithmeticOperation.getNamedWriteables()); - entries.addAll(EsqlBinaryComparison.getNamedWriteables()); - entries.addAll(FullTextPredicate.getNamedWriteables()); - entries.addAll(UnaryScalarFunction.getNamedWriteables()); - return entries; - } - protected EsqlScalarFunction(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java new file mode 100644 index 0000000000000..192ca6c43e57d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ScalarFunctionWritables { + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + entries.add(And.ENTRY); + entries.add(Atan2.ENTRY); + entries.add(BitLength.ENTRY); + entries.add(Case.ENTRY); + entries.add(CIDRMatch.ENTRY); 
+ entries.add(Coalesce.ENTRY); + entries.add(Concat.ENTRY); + entries.add(E.ENTRY); + entries.add(EndsWith.ENTRY); + entries.add(Greatest.ENTRY); + entries.add(Hypot.ENTRY); + entries.add(In.ENTRY); + entries.add(InsensitiveEquals.ENTRY); + entries.add(DateExtract.ENTRY); + entries.add(DateDiff.ENTRY); + entries.add(DateFormat.ENTRY); + entries.add(DateParse.ENTRY); + entries.add(DateTrunc.ENTRY); + entries.add(IpPrefix.ENTRY); + entries.add(Least.ENTRY); + entries.add(Left.ENTRY); + entries.add(Locate.ENTRY); + entries.add(Log.ENTRY); + entries.add(Now.ENTRY); + entries.add(Or.ENTRY); + entries.add(Pi.ENTRY); + entries.add(Pow.ENTRY); + entries.add(Right.ENTRY); + entries.add(Repeat.ENTRY); + entries.add(Replace.ENTRY); + entries.add(Reverse.ENTRY); + entries.add(Round.ENTRY); + entries.add(Split.ENTRY); + entries.add(Substring.ENTRY); + entries.add(StartsWith.ENTRY); + entries.add(Tau.ENTRY); + entries.add(ToLower.ENTRY); + entries.add(ToUpper.ENTRY); + + entries.addAll(GroupingWritables.getNamedWriteables()); + return entries; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 610fe1c5ea000..d2af110a5203f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -7,130 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; public abstract class UnaryScalarFunction extends EsqlScalarFunction { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - entries.add(Abs.ENTRY); - entries.add(Acos.ENTRY); - entries.add(Asin.ENTRY); - entries.add(Atan.ENTRY); - entries.add(ByteLength.ENTRY); - entries.add(Cbrt.ENTRY); - entries.add(Ceil.ENTRY); - entries.add(Cos.ENTRY); - entries.add(Cosh.ENTRY); - entries.add(Exp.ENTRY); - entries.add(Floor.ENTRY); - entries.add(FromBase64.ENTRY); - entries.add(IsNotNull.ENTRY); - entries.add(IsNull.ENTRY); - entries.add(Length.ENTRY); - entries.add(Log10.ENTRY); - entries.add(LTrim.ENTRY); - 
entries.add(Neg.ENTRY); - entries.add(Not.ENTRY); - entries.add(RLike.ENTRY); - entries.add(RTrim.ENTRY); - entries.add(Signum.ENTRY); - entries.add(Sin.ENTRY); - entries.add(Sinh.ENTRY); - entries.add(Space.ENTRY); - entries.add(Sqrt.ENTRY); - entries.add(StX.ENTRY); - entries.add(StY.ENTRY); - entries.add(Tan.ENTRY); - entries.add(Tanh.ENTRY); - entries.add(ToBase64.ENTRY); - entries.add(ToBoolean.ENTRY); - entries.add(ToCartesianPoint.ENTRY); - entries.add(ToDatetime.ENTRY); - entries.add(ToDateNanos.ENTRY); - entries.add(ToDegrees.ENTRY); - entries.add(ToDouble.ENTRY); - entries.add(ToGeoShape.ENTRY); - entries.add(ToCartesianShape.ENTRY); - entries.add(ToGeoPoint.ENTRY); - entries.add(ToIP.ENTRY); - entries.add(ToInteger.ENTRY); - entries.add(ToLong.ENTRY); - entries.add(ToRadians.ENTRY); - entries.add(ToString.ENTRY); - entries.add(ToUnsignedLong.ENTRY); - entries.add(ToVersion.ENTRY); - entries.add(Trim.ENTRY); - entries.add(WildcardLike.ENTRY); - entries.add(Delay.ENTRY); - entries.addAll(AbstractMultivalueFunction.getNamedWriteables()); - return entries; - } - protected final Expression field; public UnaryScalarFunction(Source source, Expression field) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 6a3b58728b192..a32761cfd9948 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.List; /** * Base class for functions that reduce multivalued fields into single valued fields. @@ -32,27 +30,6 @@ *

    */ public abstract class AbstractMultivalueFunction extends UnaryScalarFunction { - public static List getNamedWriteables() { - return List.of( - MvAppend.ENTRY, - MvAvg.ENTRY, - MvConcat.ENTRY, - MvCount.ENTRY, - MvDedupe.ENTRY, - MvFirst.ENTRY, - MvLast.ENTRY, - MvMax.ENTRY, - MvMedian.ENTRY, - MvMedianAbsoluteDeviation.ENTRY, - MvMin.ENTRY, - MvPercentile.ENTRY, - MvPSeriesWeightedSum.ENTRY, - MvSlice.ENTRY, - MvSort.ENTRY, - MvSum.ENTRY, - MvZip.ENTRY - ); - } protected AbstractMultivalueFunction(Source source, Expression field) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java new file mode 100644 index 0000000000000..7f8fcd910ad6d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class MvFunctionWritables { + public static List getNamedWriteables() { + return List.of( + MvAppend.ENTRY, + MvAvg.ENTRY, + MvConcat.ENTRY, + MvCount.ENTRY, + MvDedupe.ENTRY, + MvFirst.ENTRY, + MvLast.ENTRY, + MvMax.ENTRY, + MvMedian.ENTRY, + MvMedianAbsoluteDeviation.ENTRY, + MvMin.ENTRY, + MvPercentile.ENTRY, + MvPSeriesWeightedSum.ENTRY, + MvSlice.ENTRY, + MvSort.ENTRY, + MvSum.ENTRY, + MvZip.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 46538b77edc74..eccc7ee4672c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -112,7 +112,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and register it. To register it, look for a method like - * {@link org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction#getNamedWriteables()} + * {@link org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables#getNamedWriteables()} * in your function's class hierarchy. Keep going up until you hit a function with that name. * Then add your new "ENTRY" constant to the list it returns. *
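// [Editor's sketch -- not part of the patch] After this refactoring, the "list
// it returns" that the javadoc above points at lives in ScalarFunctionWritables
// (or a more specific holder such as MvFunctionWritables or
// ExpressionWritables#unaryScalars), no longer on the function class hierarchy.
// For a hypothetical scalar function "MyFunc" (an illustrative assumption):
public static final NamedWriteableRegistry.Entry ENTRY =
    new NamedWriteableRegistry.Entry(Expression.class, "MyFunc", MyFunc::new);
// Then register it in ScalarFunctionWritables#getNamedWriteables:
//     entries.add(MyFunc.ENTRY);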
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java index 8839244e6c601..4d08b0e9687ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.geometry.Geometry; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import java.io.IOException; -import java.util.List; import java.util.Objects; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -40,9 +38,6 @@ * and of compatible CRS. For example geo_point and geo_shape can be compared, but not geo_point and cartesian_point. */ public abstract class BinarySpatialFunction extends BinaryScalarFunction implements SpatialEvaluatorFactory.SpatialSourceResolution { - public static List getNamedWriteables() { - return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY); - } private final SpatialTypeResolver spatialTypeResolver; private SpatialCrsType crsType; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 62201bcfa858d..74394d796855f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -21,7 +20,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.List; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; @@ -31,9 +29,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { - public static List getNamedWriteables() { - return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY); - } /** * The only role of this enum is to fit the super constructor that expects a BinaryOperation which is diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index db771a6354883..cbbf87fb6c4cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -25,7 +24,6 @@ import java.io.IOException; import java.time.ZoneId; -import java.util.List; import java.util.Map; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -33,9 +31,6 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { - public static List getNamedWriteables() { - return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY); - } private final Map evaluatorMap; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java new file mode 100644 index 0000000000000..40b91beaee3eb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.Dissect; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.InlineStats; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.DissectExec; +import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; +import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; +import org.elasticsearch.xpack.esql.plan.physical.FilterExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; +import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; +import org.elasticsearch.xpack.esql.plan.physical.OrderExec; +import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; +import org.elasticsearch.xpack.esql.plan.physical.RowExec; +import org.elasticsearch.xpack.esql.plan.physical.ShowExec; +import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; + +import java.util.ArrayList; +import java.util.List; + +public class PlanWritables { + +    public static List getNamedWriteables() { +        List entries = new ArrayList<>(); +        entries.addAll(logical()); +        entries.addAll(physical()); +        return entries; +    } + +    public static List logical() { +        return List.of( +            Aggregate.ENTRY, +            Dissect.ENTRY, +            Enrich.ENTRY, +            EsRelation.ENTRY, +            EsqlProject.ENTRY, +            Eval.ENTRY, +            Filter.ENTRY, +            Grok.ENTRY, +            InlineStats.ENTRY, +            InlineJoin.ENTRY, +            Join.ENTRY, +            LocalRelation.ENTRY, +            Limit.ENTRY, +            Lookup.ENTRY, +            MvExpand.ENTRY, +            OrderBy.ENTRY, +            Project.ENTRY, +            TopN.ENTRY +        ); +    } + +    public static List physical() { +        return List.of( +            AggregateExec.ENTRY, +            DissectExec.ENTRY, +            EnrichExec.ENTRY, +            EsQueryExec.ENTRY, +            EsSourceExec.ENTRY, +            EvalExec.ENTRY, +            ExchangeExec.ENTRY, +            ExchangeSinkExec.ENTRY, + 
ExchangeSourceExec.ENTRY, + FieldExtractExec.ENTRY, + FilterExec.ENTRY, + FragmentExec.ENTRY, + GrokExec.ENTRY, + HashJoinExec.ENTRY, + LimitExec.ENTRY, + LocalSourceExec.ENTRY, + MvExpandExec.ENTRY, + OrderExec.ENTRY, + ProjectExec.ENTRY, + RowExec.ENTRY, + ShowExec.ENTRY, + SubqueryExec.ENTRY, + TopNExec.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e07dd9e14649e..e845c25bd3b32 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -6,15 +6,10 @@ */ package org.elasticsearch.xpack.esql.plan.logical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; -import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; -import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.util.List; @@ -23,29 +18,6 @@ * For example, a logical plan in English would be: "I want to get from DEN to SFO". */ public abstract class LogicalPlan extends QueryPlan implements Resolvable { - public static List getNamedWriteables() { - return List.of( - Aggregate.ENTRY, - Dissect.ENTRY, - Enrich.ENTRY, - EsRelation.ENTRY, - EsqlProject.ENTRY, - Eval.ENTRY, - Filter.ENTRY, - Grok.ENTRY, - InlineStats.ENTRY, - InlineJoin.ENTRY, - Join.ENTRY, - LocalRelation.ENTRY, - Limit.ENTRY, - Lookup.ENTRY, - MvExpand.ENTRY, - OrderBy.ENTRY, - Project.ENTRY, - TopN.ENTRY - ); - } - /** * Order is important in the enum; any values should be added at the end. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java index ecf78908d6d3e..d2935ccb75b66 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.plan.physical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; @@ -20,34 +19,6 @@ * PhysicalPlan = take Delta, DEN to SJC, then SJC to SFO */ public abstract class PhysicalPlan extends QueryPlan { - public static List getNamedWriteables() { - return List.of( - AggregateExec.ENTRY, - DissectExec.ENTRY, - EnrichExec.ENTRY, - EsQueryExec.ENTRY, - EsSourceExec.ENTRY, - EvalExec.ENTRY, - ExchangeExec.ENTRY, - ExchangeSinkExec.ENTRY, - ExchangeSourceExec.ENTRY, - FieldExtractExec.ENTRY, - FilterExec.ENTRY, - FragmentExec.ENTRY, - GrokExec.ENTRY, - HashJoinExec.ENTRY, - LimitExec.ENTRY, - LocalSourceExec.ENTRY, - MvExpandExec.ENTRY, - OrderExec.ENTRY, - ProjectExec.ENTRY, - RowExec.ENTRY, - ShowExec.ENTRY, - SubqueryExec.ENTRY, - TopNExec.ENTRY - ); - } - public PhysicalPlan(Source source, List children) { super(source, children); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index b091ab0c1bafc..67948fe717f2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -21,8 +21,8 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; @@ -60,17 +60,10 @@ import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import 
org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.IndexResolver; @@ -199,18 +192,10 @@ public List getNamedWriteables() { entries.add(SingleValueQuery.ENTRY); entries.add(AsyncOperator.Status.ENTRY); entries.add(EnrichLookupOperator.Status.ENTRY); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project - entries.addAll(NamedExpression.getNamedWriteables()); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); // TODO combine with above once these are in the same project - entries.addAll(Expression.getNamedWriteables()); - entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); // TODO combine with above once these are in the same project - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); + + entries.addAll(BlockWritables.getNamedWriteables()); + entries.addAll(ExpressionWritables.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); return entries; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 0a9e8b1b90681..9c24ec96dddf8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -24,15 +24,11 @@ import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -115,17 +111,9 @@ public static NamedWriteableRegistry writableRegistry() { entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, 
RegexpQueryBuilder.NAME, RegexpQueryBuilder::new)); entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, ExistsQueryBuilder.NAME, ExistsQueryBuilder::new)); entries.add(SingleValueQuery.ENTRY); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.addAll(NamedExpression.getNamedWriteables()); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); + entries.addAll(ExpressionWritables.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); + entries.addAll(BlockWritables.getNamedWriteables()); return new NamedWriteableRegistry(entries); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java index 2f3aa09868637..134981d3c3b0c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverSleeps; @@ -39,7 +39,7 @@ protected EsqlQueryResponse.Profile mutateInstance(EsqlQueryResponse.Profile ins @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), BlockWritables.getNamedWriteables().stream()).toList() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4aaf4f6cccf0f..35364089127cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -99,7 +100,7 @@ public void blockFactoryEmpty() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), 
BlockWritables.getNamedWriteables().stream()).toList() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java index a2aa447c748e9..6dd0c5fe88afd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java @@ -8,20 +8,11 @@ package org.elasticsearch.xpack.esql.expression; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; -import java.util.ArrayList; -import java.util.List; - public abstract class AbstractExpressionSerializationTests extends AbstractNodeSerializationTests { public static Expression randomChild() { return ReferenceAttributeTests.randomReferenceAttribute(false); @@ -29,17 +20,7 @@ public static Expression randomChild() { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(NamedExpression.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(EsqlScalarFunction.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(FullTextFunction.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); - entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); - entries.add(org.elasticsearch.xpack.esql.expression.Order.ENTRY); - return new NamedWriteableRegistry(entries); + return new NamedWriteableRegistry(ExpressionWritables.getNamedWriteables()); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java index ccbed01994bf7..7bb8ab3e147e2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java @@ -11,23 +11,18 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.AbstractWireTestCase; import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.in; public class AliasTests extends AbstractWireTestCase { public static Alias randomAlias() { @@ -76,10 +71,6 @@ protected Alias copyInstance(Alias instance, TransportVersion version) throws IO @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(NamedExpression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - entries.addAll(Expression.getNamedWriteables()); - return new NamedWriteableRegistry(entries); + return new NamedWriteableRegistry(ExpressionWritables.allExpressions()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java index a9750acdb1b84..d59e309790ad2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java @@ -15,13 +15,12 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.session.Configuration; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Objects; import static org.hamcrest.Matchers.sameInstance; @@ -52,9 +51,7 @@ protected final ExtraAttribute mutateInstance(ExtraAttribute instance) { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(Attribute.getNamedWriteables()); - entries.add(UnsupportedAttribute.ENTRY); - return new NamedWriteableRegistry(entries); + return new NamedWriteableRegistry(ExpressionWritables.attributes()); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index d3e1710a715af..9a1a30b892b22 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -23,10 +24,10 @@ import 
org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.function.FieldAttributeTests; import org.elasticsearch.xpack.esql.expression.function.MetadataAttributeTests; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttributeTests; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.type.EsFieldTests; @@ -280,9 +281,8 @@ private Column randomColumn() { static { List writeables = new ArrayList<>(); - writeables.addAll(Block.getNamedWriteables()); - writeables.addAll(Attribute.getNamedWriteables()); - writeables.add(UnsupportedAttribute.ENTRY); + writeables.addAll(BlockWritables.getNamedWriteables()); + writeables.addAll(ExpressionWritables.attributes()); REGISTRY = new NamedWriteableRegistry(new ArrayList<>(new HashSet<>(writeables))); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java index 6936c96a143d4..eea408914f4c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java @@ -8,13 +8,11 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelationSerializationTests; import java.util.ArrayList; @@ -32,12 +30,10 @@ public static LogicalPlan randomChild(int depth) { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List entries = new ArrayList<>(); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.logical()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java index ccb27b41f2ed6..1f56150977d99 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.AbstractWireTestCase; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -81,6 +82,6 @@ protected boolean shouldBeSame(LocalSupplier newInstance) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java index 4b74114a0e01c..7689b80515880 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java @@ -9,16 +9,13 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import java.util.ArrayList; import java.util.List; @@ -50,13 +47,10 @@ public static Integer randomEstimatedRowSize() { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List entries = new ArrayList<>(); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); entries.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); // Query builders entries.add(Add.ENTRY); // Used by the eval tests return new 
NamedWriteableRegistry(entries); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java index 1f35bb5312b20..b010616cd7cf7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.esql.Column; @@ -110,6 +110,6 @@ protected Configuration mutateInstance(Configuration in) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java index f533c20975aff..987ab103cf80b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java @@ -10,14 +10,13 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.AbstractWireTestCase; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; @@ -92,9 +91,8 @@ protected MultiTypeEsField mutateInstance(MultiTypeEsField instance) throws IOEx @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List entries = new ArrayList<>(UnaryScalarFunction.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); + List entries = new ArrayList<>(ExpressionWritables.allExpressions()); + entries.addAll(ExpressionWritables.unaryScalars()); return new NamedWriteableRegistry(entries); } From 231ebaf777b707083848d65042237683ad41c27d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:24:26 +1100 Subject: [PATCH 
058/386] Mute org.elasticsearch.upgrades.FullClusterRestartIT testNewReplicasTimeSeriesMode {cluster=OLD} #117087 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fb3a0ac76ca22..49c21394daac6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -251,6 +251,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT method: testRollupIndex {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/117086 +- class: org.elasticsearch.upgrades.FullClusterRestartIT + method: testNewReplicasTimeSeriesMode {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/117087 # Examples: # From 3b3ea209959dc221a13ab6ece2fecab24de18fae Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:24:46 +1100 Subject: [PATCH 059/386] Mute org.elasticsearch.upgrades.FullClusterRestartIT testNewReplicasTimeSeriesMode {cluster=UPGRADED} #117088 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 49c21394daac6..884c78ad5f5d9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -254,6 +254,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartIT method: testNewReplicasTimeSeriesMode {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/117087 +- class: org.elasticsearch.upgrades.FullClusterRestartIT + method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/117088 # Examples: # From 56fc71620f517b44ef7feedc6abd7babc7f38ca7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:25:01 +1100 Subject: [PATCH 060/386] Mute org.elasticsearch.upgrades.FullClusterRestartIT testSearchTimeSeriesMode {cluster=OLD} #117089 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 884c78ad5f5d9..f4d893cdfeeb0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -257,6 +257,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartIT method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/117088 +- class: org.elasticsearch.upgrades.FullClusterRestartIT + method: testSearchTimeSeriesMode {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/117089 # Examples: # From c1141cca2e9aefdcf0bb9071d60cfc75b55b1b08 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:25:18 +1100 Subject: [PATCH 061/386] Mute org.elasticsearch.upgrades.FullClusterRestartIT testSearchTimeSeriesMode {cluster=UPGRADED} #117090 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f4d893cdfeeb0..28e39c75f349d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -260,6 +260,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartIT method: testSearchTimeSeriesMode {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/117089 +- class: org.elasticsearch.upgrades.FullClusterRestartIT + method: testSearchTimeSeriesMode {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/117090 # Examples: # From 8814debf4ff37b72a9ae61393b38501516375c07 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:25:30 +1100 Subject: [PATCH 062/386] Mute org.elasticsearch.xpack.restart.CoreFullClusterRestartIT testNewReplicasTimeSeriesMode {cluster=OLD} #117091 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 28e39c75f349d..46992854f0744 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -263,6 +263,9 @@ tests: - class: org.elasticsearch.upgrades.FullClusterRestartIT method: testSearchTimeSeriesMode {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/117090 +- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT + method: testNewReplicasTimeSeriesMode {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/117091 # Examples: # From 9cf03bec45e48670caf5c1188e8564b1d2353ec4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:25:41 +1100 Subject: [PATCH 063/386] Mute org.elasticsearch.xpack.restart.CoreFullClusterRestartIT testSearchTimeSeriesMode {cluster=OLD} #117092 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 46992854f0744..8f083db37e193 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -266,6 +266,9 @@ tests: - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT method: testNewReplicasTimeSeriesMode {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/117091 +- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT + method: testSearchTimeSeriesMode {cluster=OLD} + issue: https://github.com/elastic/elasticsearch/issues/117092 # Examples: # From 8de0d7a9809e0675250ef5824b77f5dc8865bbb3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:25:51 +1100 Subject: [PATCH 064/386] Mute org.elasticsearch.xpack.restart.CoreFullClusterRestartIT testNewReplicasTimeSeriesMode {cluster=UPGRADED} #117093 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8f083db37e193..6cd59a3d8fa57 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -269,6 +269,9 @@ tests: - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT method: testSearchTimeSeriesMode {cluster=OLD} issue: https://github.com/elastic/elasticsearch/issues/117092 +- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT + method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} + issue: https://github.com/elastic/elasticsearch/issues/117093 # Examples: # From 8a37692d683a6ea1b11451bfe987bfe1aa7a89c9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:26:00 +1100 Subject: [PATCH 065/386] Mute org.elasticsearch.xpack.restart.CoreFullClusterRestartIT testSearchTimeSeriesMode {cluster=UPGRADED} #117094 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 6cd59a3d8fa57..36e463c20915c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -272,6 +272,9 @@ tests: - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/117093 +- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT + method: testSearchTimeSeriesMode {cluster=UPGRADED} + issue: 
https://github.com/elastic/elasticsearch/issues/117094 # Examples: # From d017f93b8366c82d246d261bfb5ecb5d89b90fee Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 08:35:46 +1100 Subject: [PATCH 066/386] Mute org.elasticsearch.discovery.ClusterDisruptionIT testAckedIndexing #117024 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 36e463c20915c..2233f8639cfe8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -275,6 +275,9 @@ tests: - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT method: testSearchTimeSeriesMode {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/117094 +- class: org.elasticsearch.discovery.ClusterDisruptionIT + method: testAckedIndexing + issue: https://github.com/elastic/elasticsearch/issues/117024 # Examples: # From 123b1035a8762debfa17e4a27f0d7f3c516e6d07 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 19 Nov 2024 16:42:07 -0500 Subject: [PATCH 067/386] Optimize PipelineConfiguration-checking ClusterStateListeners (#117038) --- .../ingest/PipelineConfigurationBridge.java | 8 +- .../geoip/GeoIpDownloaderTaskExecutor.java | 2 +- .../org/elasticsearch/TransportVersions.java | 1 + .../action/ingest/GetPipelineResponse.java | 2 +- .../MetadataIndexTemplateService.java | 2 +- .../elasticsearch/ingest/IngestService.java | 6 +- .../ingest/PipelineConfiguration.java | 141 ++++++++++++------ .../ingest/GetPipelineResponseTests.java | 2 +- .../ingest/IngestMetadataTests.java | 4 +- .../ingest/PipelineConfigurationTests.java | 51 +++++-- .../InferenceProcessorInfoExtractor.java | 8 +- .../template/IndexTemplateRegistryTests.java | 2 +- .../enrich/EnrichPolicyReindexPipeline.java | 2 +- .../loadingservice/ModelLoadingService.java | 2 +- .../LegacyStackTemplateRegistryTests.java | 2 +- .../stack/StackTemplateRegistryTests.java | 2 +- 16 files changed, 165 insertions(+), 72 deletions(-) diff --git a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java index e146b06fe3f53..cb90d10665659 100644 --- a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java +++ b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java @@ -28,8 +28,12 @@ public String getId() { return delegate.getId(); } - public Map getConfigAsMap() { - return delegate.getConfigAsMap(); + public Map getConfig() { + return delegate.getConfig(); + } + + public Map getConfig(final boolean unmodifiable) { + return delegate.getConfig(unmodifiable); } @Override diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 61ca050d91c13..2f96aa3cbc69a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -268,7 +268,7 @@ private static Set pipelinesWithGeoIpProcessor(ClusterState clusterState Set ids = new HashSet<>(); // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph for (PipelineConfiguration 
configuration : configurations) { - List> processors = (List>) configuration.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); + List> processors = (List>) configuration.getConfig().get(Pipeline.PROCESSORS_KEY); if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) { ids.add(configuration.getId()); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1a99123ebdac6..887cfea36a199 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -203,6 +203,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); + public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index 3ed1dfef50053..760b87af49a78 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -80,7 +80,7 @@ public RestStatus status() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); for (PipelineConfiguration pipeline : pipelines) { - builder.field(pipeline.getId(), summary ? Map.of() : pipeline.getConfigAsMap()); + builder.field(pipeline.getId(), summary ? 
Map.of() : pipeline.getConfig()); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index d6ed28454df96..3878a3329b634 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -782,7 +782,7 @@ private void validateUseOfDeprecatedIngestPipelines(String name, IngestMetadata private void emitWarningIfPipelineIsDeprecated(String name, Map pipelines, String pipelineName) { Optional.ofNullable(pipelineName) .map(pipelines::get) - .filter(p -> Boolean.TRUE.equals(p.getConfigAsMap().get("deprecated"))) + .filter(p -> Boolean.TRUE.equals(p.getConfig().get("deprecated"))) .ifPresent( p -> deprecationLogger.warn( DeprecationCategory.TEMPLATES, diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index ce61f197b4831..1494d2a46f9d0 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -519,7 +519,7 @@ public static boolean isNoOpPipelineUpdate(ClusterState state, PutPipelineReques && currentIngestMetadata.getPipelines().containsKey(request.getId())) { var pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); var currentPipeline = currentIngestMetadata.getPipelines().get(request.getId()); - if (currentPipeline.getConfigAsMap().equals(pipelineConfig)) { + if (currentPipeline.getConfig().equals(pipelineConfig)) { return true; } } @@ -1292,7 +1292,7 @@ synchronized void innerUpdatePipelines(IngestMetadata newIngestMetadata) { try { Pipeline newPipeline = Pipeline.create( newConfiguration.getId(), - newConfiguration.getConfigAsMap(), + newConfiguration.getConfig(false), processorFactories, scriptService ); @@ -1416,7 +1416,7 @@ public

Collection getPipelineWithProcessorType(Cla
     public synchronized void reloadPipeline(String id) throws Exception {
         PipelineHolder holder = pipelines.get(id);
-        Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfigAsMap(), processorFactories, scriptService);
+        Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfig(false), processorFactories, scriptService);
         Map<String, PipelineHolder> updatedPipelines = new HashMap<>(this.pipelines);
         updatedPipelines.put(id, new PipelineHolder(holder.configuration, updatedPipeline));
         this.pipelines = Map.copyOf(updatedPipelines);
diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java
index 9067cdb2040fd..64142caf4189d 100644
--- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java
+++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java
@@ -9,12 +9,14 @@
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.SimpleDiffable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.xcontent.ContextParser;
 import org.elasticsearch.xcontent.ObjectParser;
@@ -22,26 +24,32 @@
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xcontent.json.JsonXContent;
 
 import java.io.IOException;
-import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
 /**
- * Encapsulates a pipeline's id and configuration as a blob
+ * Encapsulates a pipeline's id and configuration as a loosely typed map -- see {@link Pipeline} for the
+ * parsed and processed object(s) that a pipeline configuration will become. This class is used for things
+ * like keeping track of pipelines in the cluster state (where a pipeline is 'just some json') whereas the
+ * {@link Pipeline} class is used in the actual processing of ingest documents through pipelines in the
+ * {@link IngestService}.
 */
 public final class PipelineConfiguration implements SimpleDiffable<PipelineConfiguration>, ToXContentObject {
 
     private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("pipeline_config", true, Builder::new);
     static {
         PARSER.declareString(Builder::setId, new ParseField("id"));
-        PARSER.declareField((parser, builder, aVoid) -> {
-            XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
-            contentBuilder.generator().copyCurrentStructure(parser);
-            builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType());
-        }, new ParseField("config"), ObjectParser.ValueType.OBJECT);
-
+        PARSER.declareField(
+            (parser, builder, aVoid) -> builder.setConfig(parser.map()),
+            new ParseField("config"),
+            ObjectParser.ValueType.OBJECT
+        );
     }
 
     public static ContextParser<Void, PipelineConfiguration> getParser() {
@@ -51,56 +59,94 @@ public static ContextParser<Void, PipelineConfiguration> getParser() {
 
     private static class Builder {
 
         private String id;
-        private BytesReference config;
-        private XContentType xContentType;
+        private Map<String, Object> config;
 
         void setId(String id) {
             this.id = id;
         }
 
-        void setConfig(BytesReference config, XContentType xContentType) {
+        void setConfig(Map<String, Object> config) {
             this.config = config;
-            this.xContentType = xContentType;
         }
 
         PipelineConfiguration build() {
-            return new PipelineConfiguration(id, config, xContentType);
+            return new PipelineConfiguration(id, config);
         }
     }
 
     private final String id;
-    // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state
-    // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options)
-    // also the get pipeline api just directly returns this to the caller
-    private final BytesReference config;
-    private final XContentType xContentType;
+    private final Map<String, Object> config;
 
-    public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) {
+    public PipelineConfiguration(String id, Map<String, Object> config) {
         this.id = Objects.requireNonNull(id);
-        this.config = Objects.requireNonNull(config);
-        this.xContentType = Objects.requireNonNull(xContentType);
+        this.config = deepCopy(config, true); // defensive deep copy
+    }
+
+    /**
+     * A convenience constructor that parses some bytes as a map representing a pipeline's config and then delegates to the
+     * conventional {@link #PipelineConfiguration(String, Map)} constructor.
+     *
+     * @param id the id of the pipeline
+     * @param config a parse-able bytes reference that will return a pipeline configuration
+     * @param xContentType the content-type to use while parsing the pipeline configuration
+     */
+    public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) {
+        this(id, XContentHelper.convertToMap(config, true, xContentType).v2());
     }
 
     public String getId() {
         return id;
     }
 
-    public Map<String, Object> getConfigAsMap() {
-        return XContentHelper.convertToMap(config, true, xContentType).v2();
+    /**
+     * @return a reference to the unmodifiable configuration map for this pipeline
+     */
+    public Map<String, Object> getConfig() {
+        return getConfig(true);
     }
 
-    // pkg-private for tests
-    XContentType getXContentType() {
-        return xContentType;
+    /**
+     * @param unmodifiable whether the returned map should be unmodifiable or not
+     * @return a reference to the unmodifiable config map (if unmodifiable is true) or
+     *         a reference to a freshly-created mutable deep copy of the config map (if unmodifiable is false)
+     */
+    public Map<String, Object> getConfig(boolean unmodifiable) {
+        if (unmodifiable) {
+            return config; // already unmodifiable
+        } else {
+            return deepCopy(config, false);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static <T> T deepCopy(final T value, final boolean unmodifiable) {
+        return (T) innerDeepCopy(value, unmodifiable);
    }
 
-    // pkg-private for tests
-    BytesReference getConfig() {
-        return config;
+    private static Object innerDeepCopy(final Object value, final boolean unmodifiable) {
+        if (value instanceof Map<?, ?> mapValue) {
+            final Map<Object, Object> copy = Maps.newLinkedHashMapWithExpectedSize(mapValue.size()); // n.b. maintain ordering
+            for (Map.Entry<?, ?> entry : mapValue.entrySet()) {
+                copy.put(innerDeepCopy(entry.getKey(), unmodifiable), innerDeepCopy(entry.getValue(), unmodifiable));
+            }
+            return unmodifiable ? Collections.unmodifiableMap(copy) : copy;
+        } else if (value instanceof List<?> listValue) {
+            final List<Object> copy = new ArrayList<>(listValue.size());
+            for (Object itemValue : listValue) {
+                copy.add(innerDeepCopy(itemValue, unmodifiable));
+            }
+            return unmodifiable ? Collections.unmodifiableList(copy) : copy;
+        } else {
+            // if this list of expected value types ends up not being exhaustive, then we want to learn about that
+            // at development time, but it's probably better to err on the side of passing through the value at runtime
+            assert (value == null || value instanceof String || value instanceof Number || value instanceof Boolean)
+                : "unexpected value type [" + value.getClass() + "]";
+            return value;
+        }
     }
 
     public Integer getVersion() {
-        Object o = getConfigAsMap().get("version");
+        Object o = config.get("version");
         if (o == null) {
             return null;
         } else if (o instanceof Number number) {
@@ -114,13 +160,22 @@ public Integer getVersion() {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         builder.field("id", id);
-        builder.field("config", getConfigAsMap());
+        builder.field("config", config);
         builder.endObject();
         return builder;
     }
 
     public static PipelineConfiguration readFrom(StreamInput in) throws IOException {
-        return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class));
+        final String id = in.readString();
+        final Map<String, Object> config;
+        if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) {
+            config = in.readGenericMap();
+        } else {
+            final BytesReference bytes = in.readSlicedBytesReference();
+            final XContentType type = in.readEnum(XContentType.class);
+            config = XContentHelper.convertToMap(bytes, true, type).v2();
+        }
+        return new PipelineConfiguration(id, config);
     }
 
     public static Diff<PipelineConfiguration> readDiffFrom(StreamInput in) throws IOException {
@@ -135,8 +190,14 @@ public String toString() {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(id);
-        out.writeBytesReference(config);
-        XContentHelper.writeTo(out, xContentType);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) {
+            out.writeGenericMap(config);
+        } else {
+            XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint();
+            builder.map(config);
+            out.writeBytesReference(BytesReference.bytes(builder));
+            XContentHelper.writeTo(out, XContentType.JSON);
+        }
     }
 
     @Override
@@ -147,14 +208,14 @@ public boolean equals(Object o) {
 
         PipelineConfiguration that = (PipelineConfiguration) o;
 
         if (id.equals(that.id) == false) return false;
-        return getConfigAsMap().equals(that.getConfigAsMap());
+        return config.equals(that.config);
     }
 
     @Override
     public int hashCode() {
         int result = id.hashCode();
-        result = 31 * result + getConfigAsMap().hashCode();
+        result = 31 * result + config.hashCode();
         return result;
     }
 
@@ -164,7 +225,7 @@ public int hashCode() {
     *

    The given upgrader is applied to the config map for any processor of the given type. */ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.ProcessorConfigUpgrader upgrader) { - Map mutableConfigMap = getConfigAsMap(); + Map mutableConfigMap = getConfig(false); boolean changed = false; // This should be a List of Maps, where the keys are processor types and the values are config maps. // But we'll skip upgrading rather than fail if not. @@ -180,11 +241,7 @@ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.Process } } if (changed) { - try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { - return new PipelineConfiguration(id, BytesReference.bytes(builder.map(mutableConfigMap)), xContentType); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + return new PipelineConfiguration(id, mutableConfigMap); } else { return this; } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java index 4e6b2b17b2554..61284a49b2502 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java @@ -79,7 +79,7 @@ public void testXContentDeserialization() throws IOException { assertEquals(actualPipelines.size(), parsedPipelines.size()); for (PipelineConfiguration pipeline : parsedPipelines) { assertTrue(pipelinesMap.containsKey(pipeline.getId())); - assertEquals(pipelinesMap.get(pipeline.getId()).getConfigAsMap(), pipeline.getConfigAsMap()); + assertEquals(pipelinesMap.get(pipeline.getId()).getConfig(), pipeline.getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index b62fff2eceb28..8235c66ef976b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -56,8 +56,8 @@ public void testFromXContent() throws IOException { assertEquals(2, custom.getPipelines().size()); assertEquals("1", custom.getPipelines().get("1").getId()); assertEquals("2", custom.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), custom.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), custom.getPipelines().get("2").getConfigAsMap()); + assertEquals(pipeline.getConfig(), custom.getPipelines().get("1").getConfig()); + assertEquals(pipeline2.getConfig(), custom.getPipelines().get("2").getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java index 202c4edb2d0c8..7be6e97762ccf 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java @@ -26,26 +26,57 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; import java.util.function.Predicate; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PipelineConfigurationTests extends AbstractXContentTestCase { + public void 
testConfigInvariants() { + Map original = Map.of("a", 1); + Map mutable = new HashMap<>(original); + PipelineConfiguration configuration = new PipelineConfiguration("1", mutable); + // the config is equal to the original & mutable map, regardless of how you get a reference to it + assertThat(configuration.getConfig(), equalTo(original)); + assertThat(configuration.getConfig(), equalTo(mutable)); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(true))); + // the config is the same instance as itself when unmodifiable is true + assertThat(configuration.getConfig(), sameInstance(configuration.getConfig())); + assertThat(configuration.getConfig(), sameInstance(configuration.getConfig(true))); + // but it's not the same instance as the original mutable map, nor if unmodifiable is false + assertThat(configuration.getConfig(), not(sameInstance(mutable))); + assertThat(configuration.getConfig(), not(sameInstance(configuration.getConfig(false)))); + + // changing the mutable map doesn't alter the pipeline's configuration + mutable.put("b", 2); + assertThat(configuration.getConfig(), equalTo(original)); + + // the modifiable map can be modified + Map modifiable = configuration.getConfig(false); + modifiable.put("c", 3); // this doesn't throw an exception + assertThat(modifiable.get("c"), equalTo(3)); + // but the next modifiable copy is a new fresh copy, and doesn't reflect those changes + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + } + public void testSerialization() throws IOException { PipelineConfiguration configuration = new PipelineConfiguration( "1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); - + assertThat(configuration.getConfig(), anEmptyMap()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals("{}", serialized.getConfig().utf8ToString()); + assertThat(serialized.getConfig(), anEmptyMap()); } public void testMetaSerialization() throws IOException { @@ -56,13 +87,14 @@ public void testMetaSerialization() throws IOException { new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals(configJson, serialized.getConfig().utf8ToString()); + assertEquals( + XContentHelper.convertToMap(new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), true, XContentType.JSON).v2(), + serialized.getConfig() + ); } public void testParser() throws IOException { @@ -80,9 +112,8 @@ public void testParser() throws IOException { XContentParser xContentParser = xContentType.xContent() .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, bytes.streamInput()); PipelineConfiguration parsed = parser.parse(xContentParser, null); - assertEquals(xContentType.canonical(), parsed.getXContentType()); - assertEquals("{}", 
XContentHelper.convertToJson(parsed.getConfig(), false, parsed.getXContentType())); - assertEquals("1", parsed.getId()); + assertThat(parsed.getId(), equalTo("1")); + assertThat(parsed.getConfig(), anEmptyMap()); } public void testGetVersion() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java index 83f7832645270..ad8a55a5f8443 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java @@ -51,7 +51,7 @@ public static int countInferenceProcessors(ClusterState state) { } Counter counter = Counter.newCounter(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map configMap = configuration.getConfigAsMap(); + Map configMap = configuration.getConfig(); List> processorConfigs = (List>) configMap.get(PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { @@ -73,7 +73,7 @@ public static Set getModelIdsFromInferenceProcessors(IngestMetadata inge Set modelIds = new LinkedHashSet<>(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map configMap = configuration.getConfigAsMap(); + Map configMap = configuration.getConfig(); List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { @@ -100,7 +100,7 @@ public static Map> pipelineIdsByResource(ClusterState state, return pipelineIdsByModelIds; } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map configMap = configuration.getConfigAsMap(); + Map configMap = configuration.getConfig(); List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { @@ -131,7 +131,7 @@ public static Set pipelineIdsForResource(ClusterState state, Set return pipelineIds; } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map configMap = configuration.getConfigAsMap(); + Map configMap = configuration.getConfig(); List> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map processorConfigWithKey : processorConfigs) { for (Map.Entry entry : processorConfigWithKey.entrySet()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java index e396712cbc360..356fac4539137 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java @@ -726,7 +726,7 @@ private static void assertPutPipelineAction( putRequest.getSource(), putRequest.getXContentType() ); - List processors = (List) pipelineConfiguration.getConfigAsMap().get("processors"); + List processors = (List) pipelineConfiguration.getConfig().get("processors"); assertThat(processors, hasSize(1)); Map setProcessor = (Map) ((Map) processors.get(0)).get("set"); assertNotNull(setProcessor.get("field")); diff --git 
a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java
index 7cddd7e037742..512955a5fe2eb 100644
--- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java
@@ -56,7 +56,7 @@ static boolean exists(ClusterState clusterState) {
         if (ingestMetadata != null) {
             final PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(pipelineName());
             if (pipeline != null) {
-                Object version = pipeline.getConfigAsMap().get("version");
+                Object version = pipeline.getConfig().get("version");
                 return version instanceof Number number && number.intValue() >= ENRICH_PIPELINE_LAST_UPDATED_VERSION;
             }
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java
index deb645ff96133..4a9d65481d412 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java
@@ -981,7 +981,7 @@ private static Set<String> countInferenceProcessors(IngestMetadat
             return allReferencedModelKeys;
         }
         ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> {
-            Object processors = pipelineConfiguration.getConfigAsMap().get("processors");
+            Object processors = pipelineConfiguration.getConfig().get("processors");
             if (processors instanceof List) {
                 for (Object processor : (List) processors) {
                     if (processor instanceof Map) {
diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java
index b8c64f945db0a..654cf494e0e6f 100644
--- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java
+++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java
@@ -56,7 +56,7 @@ public void testThatTemplatesAreDeprecated() {
         registry.getIngestPipelines()
             .stream()
             .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON))
-            .map(PipelineConfiguration::getConfigAsMap)
+            .map(PipelineConfiguration::getConfig)
             .forEach(p -> assertTrue((Boolean) p.get("deprecated")));
     }
diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
index 35e81f6f4c8c7..a8043f3d5e4e5 100644
--- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
+++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java
@@ -516,7 +516,7 @@ public void testThatTemplatesAreNotDeprecated() {
         registry.getIngestPipelines()
             .stream()
             .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON))
-            .map(PipelineConfiguration::getConfigAsMap)
+            .map(PipelineConfiguration::getConfig)
             .forEach(p -> assertFalse((Boolean) p.get("deprecated")));
     }
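For reference, the map-based accessors that this patch threads through the codebase can be exercised roughly as follows. This is a hedged usage sketch, not part of the patch itself; the pipeline id and config keys are made up for illustration:

    // assumes: import java.util.List; import java.util.Map;
    // assumes: import org.elasticsearch.ingest.PipelineConfiguration;
    PipelineConfiguration configuration = new PipelineConfiguration(
        "my-pipeline",                                                 // hypothetical pipeline id
        Map.of("description", "an example", "processors", List.of())
    );
    Map<String, Object> readOnly = configuration.getConfig();      // shared, unmodifiable view
    Map<String, Object> mutable = configuration.getConfig(false);  // fresh mutable deep copy
    mutable.put("version", 2);                                     // does not affect the stored config

Because read-only callers share one unmodifiable map instead of re-parsing bytes on every access, cluster-state listeners that only inspect pipeline configs avoid the per-call deep copy that `getConfigAsMap` used to imply.

From 72c44595f4810eebba6cf2aa7bb7c3e14f1a8a95 Mon Sep 17 00:00:00 2001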
From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Tue, 19 Nov 2024 14:43:50 -0700 Subject: [PATCH 068/386] (Doc+) link videos for allocation and ilm (#116880) * (Doc+) link videos for allocation and ilm --------- Co-authored-by: shainaraskas <58563081+shainaraskas@users.noreply.github.com> --- docs/reference/cluster/allocation-explain.asciidoc | 6 +++++- docs/reference/ilm/error-handling.asciidoc | 5 +++-- docs/reference/snapshot-restore/repository-s3.asciidoc | 3 +++ .../common-issues/diagnose-unassigned-shards.asciidoc | 3 ++- .../common-issues/red-yellow-cluster-status.asciidoc | 6 ++++++ docs/reference/troubleshooting/diagnostic.asciidoc | 2 ++ 6 files changed, 21 insertions(+), 4 deletions(-) diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index bbbea192f0f86..e640fa77c71ee 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -159,6 +159,8 @@ node. <5> The decider which led to the `no` decision for the node. <6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision. In this example, a newly created index has <> that requires that it only be allocated to a node named `nonexistent_node`, which does not exist, so the index is unable to allocate. +See https://www.youtube.com/watch?v=5z3n2VgusLE[this video] for a walkthrough of troubleshooting a node and index setting mismatch. + [[maximum-number-of-retries-exceeded]] ====== Maximum number of retries exceeded @@ -235,7 +237,9 @@ primary shard that was previously allocated. ---- // NOTCONSOLE -TIP: If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>. +If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>. + +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. ===== Unassigned replica shard diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index e8df44653e9c5..911dc8b9cce40 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -8,8 +8,9 @@ When this happens, {ilm-init} moves the index to an `ERROR` step. If {ilm-init} cannot resolve the error automatically, execution is halted until you resolve the underlying issues with the policy, index, or cluster. -See this https://www.youtube.com/watch?v=VCIqkji3IwY[{ilm-init} health video] -for example troubleshooting walkthrough. +See https://www.youtube.com/watch?v=VCIqkji3IwY[this video] +for a walkthrough of troubleshooting current {ilm-init} health issues, and https://www.youtube.com/watch?v=onrnnwjYWSQ[this video] +for a walkthrough of troubleshooting historical {ilm-init} issues. 
For example, you might have a `shrink-index` policy that shrinks an index to four shards once it is at least five days old: diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 36f311b1cdd97..1b08a802a444f 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -6,6 +6,9 @@ You can use AWS S3 as a repository for {ref}/snapshot-restore.html[Snapshot/Rest *If you are looking for a hosted solution of Elasticsearch on AWS, please visit https://www.elastic.co/cloud/.* +See https://www.youtube.com/watch?v=ACqfyzWf-xs[this video] +for a walkthrough of connecting an AWS S3 repository. + [[repository-s3-usage]] ==== Getting started diff --git a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc index fe9422d6d4c53..e1ceefb92bbec 100644 --- a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc +++ b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc @@ -8,5 +8,6 @@ In order to diagnose the unassigned shards in your deployment use the following include::{es-ref-dir}/tab-widgets/troubleshooting/data/diagnose-unassigned-shards-widget.asciidoc[] - +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index eb56a37562c31..4289242deb486 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -19,6 +19,8 @@ operate to have a green health status. In many cases, your cluster will recover to green health status automatically. If the cluster doesn't automatically recover, then you must <> the remaining problems so management and cleanup activities can proceed. +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. [discrete] [[diagnose-cluster-status]] @@ -90,6 +92,8 @@ PUT _cluster/settings } ---- +See https://www.youtube.com/watch?v=MiKKUdZvwnI[this video] for a walkthrough of troubleshooting "no allocations are allowed". [discrete] [[fix-cluster-status-recover-nodes]] ===== Recover lost nodes @@ -262,3 +266,5 @@ POST _cluster/reroute ---- // TEST[s/^/PUT my-index\n/] // TEST[catch:bad_request] + +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. \ No newline at end of file diff --git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc index a944ca88d285d..c6d46b9e94fc8 100644 --- a/docs/reference/troubleshooting/diagnostic.asciidoc +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -13,6 +13,8 @@ This information can be used to troubleshoot problems with your cluster. For exa You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. +See https://www.youtube.com/watch?v=Bb6SaqhqYHw[this video] for a walkthrough of capturing an {es} diagnostic.
+ [discrete] [[diagnostic-tool-requirements]] === Requirements From 82d14c9710fa3048c2750cfd110f092ceb8eafdc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 09:13:29 +1100 Subject: [PATCH 069/386] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/esql/esql-across-clusters/line_197} #117099 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2233f8639cfe8..4f8f834bb2c50 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -278,6 +278,9 @@ tests: - class: org.elasticsearch.discovery.ClusterDisruptionIT method: testAckedIndexing issue: https://github.com/elastic/elasticsearch/issues/117024 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/esql/esql-across-clusters/line_197} + issue: https://github.com/elastic/elasticsearch/issues/117099 # Examples: # From 6272ca2fd1c7d371848f1bb2d102841e38c7b99f Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 20 Nov 2024 11:25:05 +1100 Subject: [PATCH 070/386] Skip eager reconciliation for empty routing table (#116903) No need to start the eager reconciliation when the routing table is empty. An empty routing table means either the cluster has no shards or the state has not recovered. The eager reconciliation is not necessary in both cases. Resolves: #115885 --- .../DesiredBalanceShardsAllocator.java | 8 ++ .../DesiredBalanceShardsAllocatorTests.java | 75 +++++++++++++++++++ 2 files changed, 83 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index bfe8a20f18043..72261df658ca1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -131,6 +131,10 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { recordTime( cumulativeComputationTime, + // We set currentDesiredBalance back to INITIAL when the node stands down as master in onNoLongerMaster. + // However, it is possible that we revert the effect here by setting it again since the computation is async + // and does not check whether the node is master. This should have little to no practical impact. But it may + // lead to unexpected behaviours for tests. See also https://github.com/elastic/elasticsearch/pull/116904 () -> setCurrentDesiredBalance( desiredBalanceComputer.compute( getInitialDesiredBalance(), @@ -213,6 +217,10 @@ public void allocate(RoutingAllocation allocation, ActionListener listener queue.add(index, listener); desiredBalanceComputation.onNewInput(DesiredBalanceInput.create(index, allocation)); + if (allocation.routingTable().indicesRouting().isEmpty()) { + logger.debug("No eager reconciliation needed for empty routing table"); + return; + } // Starts reconciliation towards desired balance that might have not been updated with a recent calculation yet. // This is fine as balance should have incremental rather than radical changes. // This should speed up achieving the desired balance in cases current state is still different from it (due to THROTTLING). 
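As context for the guard introduced above, here is a minimal, self-contained Java sketch (illustrative only, not Elasticsearch code: the RoutingTable record and class names below are invented for the example). It shows why a single emptiness check on the routing table covers both situations named in the commit message, since a cluster with no shards and a not-yet-recovered cluster state both present an empty indicesRouting map:

import java.util.Map;

// Toy stand-in for the cluster routing table (hypothetical; the real class
// lives in org.elasticsearch.cluster.routing and has a far richer API).
record RoutingTable(Map<String, Object> indicesRouting) {
    // Mirrors the new guard: eager reconciliation is only worthwhile when
    // there is at least one index with shards to place.
    boolean needsEagerReconciliation() {
        return indicesRouting.isEmpty() == false;
    }
}

public class EmptyRoutingTableGuard {
    public static void main(String[] args) {
        var shardlessOrUnrecovered = new RoutingTable(Map.of());
        var withShards = new RoutingTable(Map.of("my-index", new Object()));
        System.out.println(shardlessOrUnrecovered.needsEagerReconciliation()); // false -> skip the reconcile step
        System.out.println(withShards.needsEagerReconciliation());            // true  -> start reconciling
    }
}

Under these assumptions the desired-balance computation is still kicked off in both cases; only the eager reconciliation step is skipped, which matches the debug message added in the diff above.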
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index b18e2c0cd2647..9d33b697e31ca 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -57,6 +58,7 @@ import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.TestThreadPool; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Queue; @@ -79,7 +81,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class DesiredBalanceShardsAllocatorTests extends ESAllocationTestCase { @@ -916,6 +920,77 @@ public void resetDesiredBalance() { } } + public void testNotReconcileEagerlyForEmptyRoutingTable() { + final var threadPool = new TestThreadPool(getTestName()); + final var clusterService = ClusterServiceUtils.createClusterService(ClusterState.EMPTY_STATE, threadPool); + final var clusterSettings = createBuiltInClusterSettings(); + final var shardsAllocator = createShardsAllocator(); + final var reconciliationTaskSubmitted = new AtomicBoolean(); + final var desiredBalanceShardsAllocator = new DesiredBalanceShardsAllocator( + shardsAllocator, + threadPool, + clusterService, + new DesiredBalanceComputer(clusterSettings, TimeProviderUtils.create(() -> 1L), shardsAllocator) { + @Override + public DesiredBalance compute( + DesiredBalance previousDesiredBalance, + DesiredBalanceInput desiredBalanceInput, + Queue> pendingDesiredBalanceMoves, + Predicate isFresh + ) { + assertThat(previousDesiredBalance, sameInstance(DesiredBalance.INITIAL)); + return new DesiredBalance(desiredBalanceInput.index(), Map.of()); + } + }, + (clusterState, rerouteStrategy) -> null, + TelemetryProvider.NOOP, + EMPTY_NODE_ALLOCATION_STATS + ) { + + private ActionListener lastListener; + + @Override + public void allocate(RoutingAllocation allocation, ActionListener listener) { + lastListener = listener; + super.allocate(allocation, listener); + } + + @Override + protected void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { + fail("should not call reconcile"); + } + + @Override + protected void submitReconcileTask(DesiredBalance desiredBalance) { + assertThat(desiredBalance.lastConvergedIndex(), equalTo(0L)); + reconciliationTaskSubmitted.set(true); + lastListener.onResponse(null); + } + }; + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.INITIAL)); + try { + final PlainActionFuture future = new PlainActionFuture<>(); + desiredBalanceShardsAllocator.allocate( + new RoutingAllocation( + new AllocationDeciders(Collections.emptyList()), + clusterService.state(), + null, + null, + 
randomNonNegativeLong() + ), + future + ); + safeGet(future); + assertThat(desiredBalanceShardsAllocator.getStats().computationSubmitted(), equalTo(1L)); + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(1L)); + assertThat(reconciliationTaskSubmitted.get(), is(true)); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance().lastConvergedIndex(), equalTo(0L)); + } finally { + clusterService.close(); + terminate(threadPool); + } + } + private static IndexMetadata createIndex(String name) { return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 0)).build(); } From bc785f5ca19da7f51958495908b9819fc72b0c4e Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 19 Nov 2024 17:52:24 -0800 Subject: [PATCH 071/386] Esql/lookup join grammar (#116515) First PR for adding LOOKUP JOIN in ESQL. Introduces grammar and wires the main building blocks to execute a query; follow-ups are required (see #116208 for more details). Co-authored-by: Nik Everett --- docs/changelog/116515.yaml | 5 + docs/reference/esql/esql-commands.asciidoc | 8 +- docs/reference/esql/esql-query-api.asciidoc | 4 +- ...inestats.asciidoc => inlinestats.disabled} | 0 .../{lookup.asciidoc => lookup.disabled} | 0 .../xpack/esql/ccq/MultiClusterSpecIT.java | 2 + .../xpack/esql/CsvTestsDataLoader.java | 6 +- ....csv-spec => inlinestats.csv-spec-ignored} | 0 .../src/main/resources/lookup-join.csv-spec | 48 + ...ookup.csv-spec => lookup.csv-spec-ignored} | 82 +- .../src/main/resources/union_types.csv-spec | 4 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 34 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 296 ++- .../esql/src/main/antlr/EsqlBaseParser.g4 | 17 + .../esql/src/main/antlr/EsqlBaseParser.tokens | 296 ++- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Analyzer.java | 103 + .../xpack/esql/analysis/PreAnalyzer.java | 15 +- .../xpack/esql/analysis/Verifier.java | 16 + .../logical/PushDownAndCombineLimits.java | 4 +- .../physical/local/InsertFieldExtraction.java | 12 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 36 +- .../xpack/esql/parser/EsqlBaseLexer.java | 2321 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 24 +- .../xpack/esql/parser/EsqlBaseParser.java | 2014 ++++++++------ .../parser/EsqlBaseParserBaseListener.java | 48 + .../parser/EsqlBaseParserBaseVisitor.java | 28 + .../esql/parser/EsqlBaseParserListener.java | 40 + .../esql/parser/EsqlBaseParserVisitor.java | 24 + .../xpack/esql/parser/LogicalPlanBuilder.java | 42 +- .../xpack/esql/plan/QueryPlan.java | 9 +- .../xpack/esql/plan/logical/EsRelation.java | 7 +- .../xpack/esql/plan/logical/InlineStats.java | 4 +- .../xpack/esql/plan/logical/Lookup.java | 4 +- .../esql/plan/logical/UnresolvedRelation.java | 4 + .../xpack/esql/plan/logical/join/Join.java | 80 +- .../esql/plan/logical/join/JoinConfig.java | 11 +- .../esql/plan/logical/join/JoinType.java | 41 +- .../esql/plan/logical/join/JoinTypes.java | 155 ++ .../esql/plan/logical/join/LookupJoin.java | 103 + .../esql/plan/physical/LookupJoinExec.java | 162 ++ .../esql/planner/LocalExecutionPlanner.java | 62 +- .../esql/planner/mapper/LocalMapper.java | 40 +- .../xpack/esql/planner/mapper/Mapper.java | 21 +- .../esql/planner/mapper/MapperUtils.java | 17 +- .../xpack/esql/plugin/ComputeService.java | 5 + .../esql/plugin/TransportEsqlQueryAction.java | 1 + .../xpack/esql/session/EsqlSession.java | 6 +- .../elasticsearch/xpack/esql/CsvTests.java | 7 +- .../xpack/esql/analysis/AnalyzerTests.java | 26 +- 
.../xpack/esql/analysis/VerifierTests.java | 8 +- .../optimizer/LogicalPlanOptimizerTests.java | 8 +- .../optimizer/PhysicalPlanOptimizerTests.java | 22 +- .../esql/parser/StatementParserTests.java | 54 +- .../plan/logical/JoinSerializationTests.java | 3 +- .../xpack/esql/plan/logical/JoinTests.java | 6 +- .../planner/LocalExecutionPlannerTests.java | 1 + .../esql/tree/EsqlNodeSubclassTests.java | 14 +- 58 files changed, 3943 insertions(+), 2474 deletions(-) create mode 100644 docs/changelog/116515.yaml rename docs/reference/esql/processing-commands/{inlinestats.asciidoc => inlinestats.disabled} (100%) rename docs/reference/esql/processing-commands/{lookup.asciidoc => lookup.disabled} (100%) rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{inlinestats.csv-spec => inlinestats.csv-spec-ignored} (100%) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{lookup.csv-spec => lookup.csv-spec-ignored} (80%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java diff --git a/docs/changelog/116515.yaml b/docs/changelog/116515.yaml new file mode 100644 index 0000000000000..6c0d473361e52 --- /dev/null +++ b/docs/changelog/116515.yaml @@ -0,0 +1,5 @@ +pr: 116515 +summary: Esql/lookup join grammar +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 235113ac1394a..33e748d7eb7c1 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -38,12 +38,12 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> ifeval::["{release-state}"=="unreleased"] -* experimental:[] <> +//* experimental:[] <> endif::[] * <> * <> ifeval::["{release-state}"=="unreleased"] -* experimental:[] <> +//* experimental:[] <> endif::[] * experimental:[] <> * <> @@ -63,12 +63,12 @@ include::processing-commands/enrich.asciidoc[] include::processing-commands/eval.asciidoc[] include::processing-commands/grok.asciidoc[] ifeval::["{release-state}"=="unreleased"] -include::processing-commands/inlinestats.asciidoc[] +//include::processing-commands/inlinestats.asciidoc[] endif::[] include::processing-commands/keep.asciidoc[] include::processing-commands/limit.asciidoc[] ifeval::["{release-state}"=="unreleased"] -include::processing-commands/lookup.asciidoc[] +//include::processing-commands/lookup.asciidoc[] endif::[] include::processing-commands/mv_expand.asciidoc[] include::processing-commands/rename.asciidoc[] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index 63b8738266132..8e07a627567df 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -92,8 +92,8 @@ https://en.wikipedia.org/wiki/Query_plan[EXPLAIN PLAN]. ifeval::["{release-state}"=="unreleased"] -`table`:: -(Optional, object) Named "table" parameters that can be referenced by the <> command. +//`table`:: +//(Optional, object) Named "table" parameters that can be referenced by the <> command. 
endif::[] [discrete] diff --git a/docs/reference/esql/processing-commands/inlinestats.asciidoc b/docs/reference/esql/processing-commands/inlinestats.disabled similarity index 100% rename from docs/reference/esql/processing-commands/inlinestats.asciidoc rename to docs/reference/esql/processing-commands/inlinestats.disabled diff --git a/docs/reference/esql/processing-commands/lookup.asciidoc b/docs/reference/esql/processing-commands/lookup.disabled similarity index 100% rename from docs/reference/esql/processing-commands/lookup.asciidoc rename to docs/reference/esql/processing-commands/lookup.disabled diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 60eecbb7658b7..5df85d1004dd1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -47,6 +47,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,6 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 478c68db68aa7..0d6659ad37a27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -93,6 +93,8 @@ public class CsvTestsDataLoader { private static final TestsDataset BOOKS = new TestsDataset("books"); private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); + private static final String LOOKUP_INDEX_SUFFIX = "_lookup"; + public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), Map.entry(HOSTS.indexName, HOSTS), @@ -128,7 +130,9 @@ public class CsvTestsDataLoader { Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), Map.entry(BOOKS.indexName, BOOKS), - 
Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT), + // JOIN LOOKUP alias + Map.entry(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX, LANGUAGES.withIndex(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX)) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec new file mode 100644 index 0000000000000..605bf78c20a32 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -0,0 +1,48 @@ +// +// CSV spec for LOOKUP JOIN command +// Reuses the sample dataset and commands from enrich.csv-spec +// + +basicOnTheDataNode +required_capability: join_lookup + +//TODO: this returns different results in CI than locally +// sometimes null, sometimes Spanish (likely related to the execution order) +FROM employees +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no < 500 +| KEEP emp_no, language_name +| SORT emp_no +| LIMIT 1 +; + +emp_no:integer | language_name:keyword +//10091 | Spanish +; + +basicRow-Ignore +required_capability: join_lookup + +ROW language_code = 1 +| LOOKUP JOIN languages_lookup ON language_code +; + +language_code:keyword | language_name:keyword +1 | English +; + +basicOnTheCoordinator +required_capability: join_lookup + +FROM employees +| SORT emp_no +| LIMIT 1 +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| KEEP emp_no, language_name +; + +emp_no:integer | language_name:keyword +10001 | French +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored similarity index 80% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored index 9cf96f7c0b6de..685e3ab2778e1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored @@ -4,7 +4,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -19,7 +19,7 @@ emp_no:integer | languages:integer | lang_name:keyword keywordByMvIntAndQuotedSource required_capability: lookup_v4 ROW int=[1, 2, 3] -| LOOKUP "int_number_names" ON int +| LOOKUP_🐔 "int_number_names" ON int ; int:integer | name:keyword @@ -29,7 +29,7 @@ int:integer | name:keyword keywordByDupeIntAndTripleQuotedSource required_capability: lookup_v4 ROW int=[1, 1, 1] -| LOOKUP """int_number_names""" ON int +| LOOKUP_🐔 """int_number_names""" ON int ; int:integer | name:keyword @@ -39,10 +39,10 @@ int:integer | name:keyword intByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP int_number_names ON name +| LOOKUP_🐔 
int_number_names ON name ; -name:keyword | int:integer +name:keyword | int:integer two | 2 ; @@ -53,7 +53,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages.long AS long -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -68,7 +68,7 @@ emp_no:integer | languages:long | lang_name:keyword longByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP long_number_names ON name +| LOOKUP_🐔 long_number_names ON name ; name:keyword | long:long @@ -81,7 +81,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME height AS double -| LOOKUP double_number_names ON double +| LOOKUP_🐔 double_number_names ON double | RENAME double AS height, name AS height_name | KEEP emp_no, height, height_name ; @@ -96,7 +96,7 @@ emp_no:integer | height:double | height_name:keyword floatByKeyword required_capability: lookup_v4 ROW name="two point zero eight" -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:keyword | double:double @@ -106,7 +106,7 @@ two point zero eight | 2.08 floatByNullMissing required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:null | double:double @@ -116,7 +116,7 @@ name:null | double:double floatByNullMatching required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names_with_null ON name +| LOOKUP_🐔 double_number_names_with_null ON name ; name:null | double:double @@ -126,7 +126,7 @@ name:null | double:double intIntByKeywordKeyword required_capability: lookup_v4 ROW aa="foo", ab="zoo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -136,7 +136,7 @@ foo | zoo | 1 | -1 intIntByKeywordKeywordMissing required_capability: lookup_v4 ROW aa="foo", ab="zoi" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -146,7 +146,7 @@ foo | zoi | null | null intIntByKeywordKeywordSameValues required_capability: lookup_v4 ROW aa="foo", ab="foo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -156,7 +156,7 @@ foo | foo | 2 | -2 intIntByKeywordKeywordSameValuesMissing required_capability: lookup_v4 ROW aa="bar", ab="bar" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -168,7 +168,7 @@ lookupBeforeStats-Ignore required_capability: lookup_v4 FROM employees | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | STATS height=ROUND(AVG(height), 3) BY languages | SORT height ASC; @@ -178,7 +178,7 @@ height:double | languages:keyword 1.732 | one 1.762 | two 1.764 | three - 1.809 | null + 1.809 | null 1.847 | five ; @@ -186,14 +186,14 @@ lookupAfterStats required_capability: lookup_v4 FROM employees | STATS int=TO_INT(AVG(height)) -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | KEEP name; name:keyword two ; -// Makes sure the LOOKUP squashes previous names +// Makes sure the LOOKUP_🐔 squashes previous names doesNotDuplicateNames required_capability: lookup_v4 FROM employees @@ -201,7 +201,7 @@ FROM employees | LIMIT 4 | RENAME languages.long AS long | EVAL name = CONCAT(first_name, " ", last_name) -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages | KEEP emp_no, languages, name ; @@ -219,7 +219,7 @@ 
required_capability: lookup_v4 FROM employees | WHERE emp_no < 10005 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC @@ -238,7 +238,7 @@ FROM employees | WHERE emp_no < 10005 | SORT languages ASC, emp_no ASC | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no ; @@ -256,7 +256,7 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", int = emp_no - 10000, name = "name", right = "right" -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; emp_no:integer | left:keyword | int:integer | right:keyword | name:keyword @@ -269,65 +269,57 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", nb = -10011+emp_no, na = "na", middle = "middle", ab = "ab", aa = "bar", right = "right" -| LOOKUP big ON aa, nb +| LOOKUP_🐔 big ON aa, nb ; -emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer +emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer 10001 | left | -10 | middle | bar | right | zop | 10 ; // -// Make sure that the new LOOKUP syntax doesn't clash with any existing things -// named "lookup" +// Make sure that the new LOOKUP_🐔 syntax doesn't clash with any existing things +// named "lookup_🐔" // -rowNamedLookup -required_capability: lookup_v4 -ROW lookup = "a" -; - -lookup:keyword - a -; rowNamedLOOKUP required_capability: lookup_v4 -ROW LOOKUP = "a" +ROW lookup_🐔 = "a" ; -LOOKUP:keyword +lookup_🐔:keyword a ; evalNamedLookup required_capability: lookup_v4 -ROW a = "a" | EVAL lookup = CONCAT(a, "1") +ROW a = "a" | EVAL lookup_🐔 = CONCAT(a, "1") ; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword a | a1 ; dissectNamedLookup required_capability: lookup_v4 -row a = "foo bar" | dissect a "foo %{lookup}"; +row a = "foo bar" | dissect a "foo %{lookup_🐔}"; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword foo bar | bar ; renameIntoLookup required_capability: lookup_v4 -row a = "foo bar" | RENAME a AS lookup; +row a = "foo bar" | RENAME a AS lookup_🐔; -lookup:keyword +lookup_🐔:keyword foo bar ; sortOnLookup required_capability: lookup_v4 -ROW lookup = "a" | SORT lookup +ROW lookup_🐔 = "a" | SORT lookup_🐔 ; -lookup:keyword +lookup_🐔:keyword a ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index 0f4c3ef6ef9e9..ad6dd4fecc3f7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -1517,7 +1517,7 @@ FROM sample_data, sample_data_ts_long null | 172.21.0.5 | 1232382 | Disconnected | 8268153 ; -multiIndexIndirectUseOfUnionTypesInLookup +multiIndexIndirectUseOfUnionTypesInLookup-Ignore // TODO: `union_types` is required only because this makes the test skip in the csv tests; better solution: // make the csv tests work with multiple indices. 
required_capability: union_types @@ -1526,7 +1526,7 @@ FROM sample_data, sample_data_ts_long | SORT client_ip ASC | LIMIT 1 | EVAL int = (event_duration - 1232380)::integer -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; @timestamp:null | client_ip:ip | event_duration:long | message:keyword | int:integer | name:keyword diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 6ec93d203d984..b2f0e2942d3cc 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -85,8 +85,15 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // main section while preserving alphabetical order: // MYCOMMAND : 'mycommand' -> ... DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); -DEV_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(LOOKUP_MODE); +DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); +// list of all JOIN commands +DEV_JOIN : {this.isDevVersion()}? 'join' -> pushMode(JOIN_MODE); +DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); +DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); +DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); +DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); + // // Catch-all for unrecognized commands - don't define any beyond this line @@ -543,6 +550,31 @@ LOOKUP_FIELD_WS : WS -> channel(HIDDEN) ; +// +// JOIN-related commands +// +mode JOIN_MODE; +JOIN_PIPE : PIPE -> type(PIPE), popMode; +JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN_AS : AS -> type(AS); +JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); +USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); + +JOIN_UNQUOTED_IDENTIFER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); +JOIN_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER); + +JOIN_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_WS + : WS -> channel(HIDDEN) + ; + // // METRICS command // diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 3dd1a2c754038..16376d6863b2f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 
-ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +COLON=29 +PIPE=30 +QUOTED_STRING=31 +INTEGER_LITERAL=32 +DECIMAL_LITERAL=33 +BY=34 +AND=35 +ASC=36 +ASSIGN=37 +CAST_OP=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 +EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 +SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +':'=29 +'|'=30 +'by'=34 +'and'=35 +'asc'=36 +'='=37 +'::'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 
+'USING'=119 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 67f194a1bff64..33c4d8957d387 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -54,6 +54,7 @@ processingCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand + | {this.isDevVersion()}? joinCommand ; whereCommand @@ -322,3 +323,19 @@ lookupCommand inlinestatsCommand : DEV_INLINESTATS stats=aggFields (BY grouping=fields)? ; + +joinCommand + : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + ; + +joinTarget + : index=identifier (AS alias=identifier)? + ; + +joinCondition + : ON joinPredicate (COMMA joinPredicate)* + ; + +joinPredicate + : valueExpression + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 3dd1a2c754038..16376d6863b2f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +COLON=29 +PIPE=30 +QUOTED_STRING=31 +INTEGER_LITERAL=32 +DECIMAL_LITERAL=33 +BY=34 +AND=35 +ASC=36 +ASSIGN=37 +CAST_OP=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 
+CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 +EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 +SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +':'=29 +'|'=30 +'by'=34 +'and'=35 +'asc'=36 +'='=37 +'::'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 +'USING'=119 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 9532e3dc77cb4..2a62216072e9c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -494,7 +494,12 @@ public enum Cap { /** * Support implicit casting from string literal to DATE_PERIOD or TIME_DURATION. 
*/ - IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT; + IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT, + + /** + * LOOKUP JOIN + */ + JOIN_LOOKUP(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 562d42a94483f..7ad4c3d3e644d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.Column; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -75,6 +77,12 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -98,12 +106,14 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; @@ -189,6 +199,9 @@ private static class ResolveTable extends ParameterizedAnalyzerRule maybeResolveAttribute(ua, childrenOutput)); } @@ -588,6 +620,77 @@ private LogicalPlan resolveLookup(Lookup l, List childrenOutput) { return l; } + private Join resolveLookupJoin(LookupJoin join) { + JoinConfig config = join.config(); + // for now, support only (LEFT) USING clauses + JoinType type = config.type(); + // rewrite the join into a equi-join between the field with the same name between left and right + // per SQL standard, the USING columns are placed first in the output, followed by the rest of left, then right + if (type instanceof UsingJoinType using) { + List cols = using.columns(); + // the lookup cannot be resolved, bail out + if (Expressions.anyMatch(cols, c -> c instanceof 
UnresolvedAttribute ua && ua.customMessage())) { + return join; + } + + JoinType coreJoin = using.coreJoin(); + // verify the join type + if (coreJoin != JoinTypes.LEFT) { + String name = cols.get(0).name(); + UnresolvedAttribute errorAttribute = new UnresolvedAttribute( + join.source(), + name, + "Only LEFT join is supported with USING" + ); + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + // resolve the using columns against the left and the right side then assemble the new join config + List leftKeys = resolveUsingColumns(cols, join.left().output(), "left"); + List rightKeys = resolveUsingColumns(cols, join.right().output(), "right"); + List output = new ArrayList<>(join.left().output()); + // the order is stable (since the AttributeSet preservers the insertion order) + output.addAll(join.right().outputSet().subtract(new AttributeSet(rightKeys))); + + // update the config - pick the left keys as those in the output + type = new UsingJoinType(coreJoin, rightKeys); + config = new JoinConfig(type, leftKeys, leftKeys, rightKeys); + join = new LookupJoin(join.source(), join.left(), join.right(), config, output); + } + // everything else is unsupported for now + else { + UnresolvedAttribute errorAttribute = new UnresolvedAttribute(join.source(), "unsupported", "Unsupported join type"); + // add error message + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + return join; + } + + private List resolveUsingColumns(List cols, List output, String side) { + List resolved = new ArrayList<>(cols.size()); + for (Attribute col : cols) { + if (col instanceof UnresolvedAttribute ua) { + Attribute resolvedCol = maybeResolveAttribute(ua, output); + if (resolvedCol instanceof UnresolvedAttribute ucol) { + String message = ua.unresolvedMessage(); + String match = "column [" + ucol.name() + "]"; + resolvedCol = ucol.withUnresolvedMessage(message.replace(match, match + "in " + side + " side of join")); + } + resolved.add(resolvedCol); + } + // columns are expected to be unresolved - if that's not the case return an error + else { + return singletonList( + new UnresolvedAttribute( + col.source(), + col.name(), + "Surprised to discover column [ " + col.name() + "] already resolved" + ) + ); + } + } + return resolved; + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List childrenOutput) { return maybeResolveAttribute(ua, childrenOutput, log); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index 9d7c5e141a2b1..460d30618df79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; @@ -22,14 +23,16 @@ public class PreAnalyzer { public static class PreAnalysis { - public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList()); + public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList(), emptyList()); public final List indices; public final List enriches; + public final 
List lookupIndices; - public PreAnalysis(List indices, List enriches) { + public PreAnalysis(List indices, List enriches, List lookupIndices) { this.indices = indices; this.enriches = enriches; + this.lookupIndices = lookupIndices; } } @@ -44,13 +47,17 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { protected PreAnalysis doPreAnalyze(LogicalPlan plan) { List indices = new ArrayList<>(); List unresolvedEnriches = new ArrayList<>(); + List lookupIndices = new ArrayList<>(); - plan.forEachUp(UnresolvedRelation.class, p -> indices.add(new TableInfo(p.table()))); + plan.forEachUp(UnresolvedRelation.class, p -> { + List list = p.indexMode() == IndexMode.LOOKUP ? lookupIndices : indices; + list.add(new TableInfo(p.table())); + }); plan.forEachUp(Enrich.class, unresolvedEnriches::add); // mark plan as preAnalyzed (if it were marked, there would be no analysis) plan.forEachUp(LogicalPlan::setPreAnalyzed); - return new PreAnalysis(indices, unresolvedEnriches); + return new PreAnalysis(indices, unresolvedEnriches, lookupIndices); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 0641a03c88b69..54661fa42ccbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; @@ -53,6 +54,7 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.stats.Metrics; @@ -171,6 +173,20 @@ else if (p instanceof Lookup lookup) { else { lookup.matchFields().forEach(unresolvedExpressions); } + } else if (p instanceof LookupJoin lj) { + // expect right side to always be a lookup index + lj.right().forEachUp(EsRelation.class, r -> { + if (r.indexMode() != IndexMode.LOOKUP) { + failures.add( + fail( + r, + "LOOKUP JOIN right side [{}] must be a lookup index (index_mode=lookup, not [{}]", + r.index().name(), + r.indexMode().getName() + ) + ); + } + }); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java index 153efa5b5c233..fb9d3f7e2f91e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; public final class 
PushDownAndCombineLimits extends OptimizerRules.OptimizerRule { @@ -63,7 +63,7 @@ public LogicalPlan rule(Limit limit) { } } } else if (limit.child() instanceof Join join) { - if (join.config().type() == JoinType.LEFT && join.right() instanceof LocalRelation) { + if (join.config().type() == JoinTypes.LEFT && join.right() instanceof LocalRelation) { // This is a hash join from something like a lookup. return join.replaceChildren(limit.replaceChild(join.left()), join.right()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index 1c20f765c6d51..ea9cd76bcb9bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -17,10 +17,12 @@ import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.LeafExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; +import java.util.Collections; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -93,8 +95,16 @@ public PhysicalPlan apply(PhysicalPlan plan) { private static Set missingAttributes(PhysicalPlan p) { var missing = new LinkedHashSet(); - var input = p.inputSet(); + var inputSet = p.inputSet(); + // FIXME: the extractors should work on the right side as well + // skip the lookup join since the right side is always materialized and a projection + if (p instanceof LookupJoinExec join) { + // collect fields used in the join condition + return Collections.emptySet(); + } + + var input = inputSet; // collect field attributes used inside expressions p.forEachExpression(TypedAttribute.class, f -> { if (f instanceof FieldAttribute || f instanceof MetadataAttribute) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 8f9c5956dddd5..8b8bab2edbc41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -23,6 +23,11 @@ null null null null +null +null +null +null +null ':' '|' null @@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,6 +150,11 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -262,6 +280,11 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -427,6 +450,16 @@ LOOKUP_FIELD_ID_PATTERN LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS 
+JOIN_PIPE +JOIN_JOIN +JOIN_AS +JOIN_ON +USING +JOIN_UNQUOTED_IDENTIFER +JOIN_QUOTED_IDENTIFIER +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_PIPE METRICS_UNQUOTED_SOURCE METRICS_QUOTED_SOURCE @@ -461,8 +494,9 @@ SHOW_MODE SETTING_MODE LOOKUP_MODE LOOKUP_FIELD_MODE +JOIN_MODE METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 119, 1484, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 
2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 580, 8, 19, 11, 19, 12, 19, 581, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 590, 8, 20, 10, 20, 12, 20, 593, 9, 20, 1, 20, 3, 20, 596, 8, 20, 1, 20, 3, 20, 599, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 608, 8, 21, 10, 21, 12, 21, 611, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 619, 8, 22, 11, 22, 12, 22, 620, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 3, 29, 642, 8, 29, 1, 29, 4, 29, 645, 8, 29, 11, 29, 12, 29, 646, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 3, 32, 656, 8, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 663, 8, 34, 1, 35, 1, 35, 1, 35, 5, 35, 668, 8, 35, 10, 35, 12, 35, 671, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 679, 8, 35, 10, 35, 12, 35, 682, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 689, 8, 35, 1, 35, 3, 35, 692, 8, 35, 3, 35, 694, 8, 35, 1, 36, 4, 36, 697, 8, 36, 11, 36, 12, 36, 698, 1, 37, 4, 37, 702, 8, 37, 11, 37, 12, 37, 703, 1, 37, 1, 37, 5, 37, 708, 8, 37, 10, 37, 12, 37, 711, 9, 37, 1, 37, 1, 37, 4, 37, 715, 8, 37, 11, 37, 12, 37, 716, 1, 37, 4, 37, 720, 8, 37, 11, 37, 12, 37, 721, 1, 37, 1, 37, 5, 37, 726, 8, 37, 10, 37, 12, 37, 729, 9, 37, 3, 37, 731, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 737, 8, 37, 11, 37, 12, 37, 738, 1, 37, 1, 37, 3, 37, 743, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 3, 75, 874, 8, 75, 1, 75, 5, 75, 877, 8, 75, 10, 75, 12, 75, 880, 9, 75, 1, 75, 1, 75, 4, 75, 884, 8, 75, 11, 75, 12, 75, 885, 3, 75, 888, 8, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 902, 8, 78, 10, 78, 12, 78, 905, 9, 78, 1, 78, 1, 78, 3, 78, 909, 8, 78, 1, 78, 4, 78, 912, 8, 78, 
11, 78, 12, 78, 913, 3, 78, 916, 8, 78, 1, 79, 1, 79, 4, 79, 920, 8, 79, 11, 79, 12, 79, 921, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 3, 96, 999, 8, 96, 1, 97, 4, 97, 1002, 8, 97, 11, 97, 12, 97, 1003, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 3, 108, 1053, 8, 108, 1, 109, 1, 109, 3, 109, 1057, 8, 109, 1, 109, 5, 109, 1060, 8, 109, 10, 109, 12, 109, 1063, 9, 109, 1, 109, 1, 109, 3, 109, 1067, 8, 109, 1, 109, 4, 109, 1070, 8, 109, 11, 109, 12, 109, 1071, 3, 109, 1074, 8, 109, 1, 110, 1, 110, 4, 110, 1078, 8, 110, 11, 110, 12, 110, 1079, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 130, 4, 130, 1165, 8, 130, 11, 130, 12, 130, 1166, 1, 130, 1, 130, 3, 130, 1171, 8, 130, 1, 130, 4, 130, 1174, 8, 130, 11, 130, 12, 130, 1175, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 4, 163, 1321, 8, 163, 11, 163, 12, 163, 1322, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 
1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 2, 609, 680, 0, 199, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 0, 165, 64, 167, 65, 169, 66, 171, 67, 173, 0, 175, 68, 177, 69, 179, 70, 181, 71, 183, 0, 185, 0, 187, 72, 189, 73, 191, 74, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 0, 205, 75, 207, 0, 209, 76, 211, 0, 213, 0, 215, 77, 217, 78, 219, 79, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 0, 235, 80, 237, 81, 239, 82, 241, 83, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 0, 255, 84, 257, 0, 259, 85, 261, 86, 263, 87, 265, 0, 267, 0, 269, 88, 271, 89, 273, 0, 275, 90, 277, 0, 279, 91, 281, 92, 283, 93, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 0, 303, 94, 305, 95, 307, 96, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 0, 321, 97, 323, 98, 325, 99, 327, 0, 329, 100, 331, 101, 333, 102, 335, 103, 337, 0, 339, 0, 341, 104, 343, 105, 345, 106, 347, 107, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 0, 363, 108, 365, 109, 367, 110, 369, 0, 371, 0, 373, 0, 375, 0, 377, 111, 379, 112, 381, 113, 383, 0, 385, 0, 387, 0, 389, 114, 391, 115, 393, 116, 395, 0, 397, 0, 399, 117, 401, 118, 403, 119, 405, 0, 407, 0, 409, 0, 411, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 
10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1512, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 1, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 2, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 3, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 229, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 4, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 5, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 6, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 7, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 8, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 9, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 10, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 11, 367, 1, 0, 0, 
0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 12, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 13, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 14, 411, 1, 0, 0, 0, 15, 413, 1, 0, 0, 0, 17, 423, 1, 0, 0, 0, 19, 430, 1, 0, 0, 0, 21, 439, 1, 0, 0, 0, 23, 446, 1, 0, 0, 0, 25, 456, 1, 0, 0, 0, 27, 463, 1, 0, 0, 0, 29, 470, 1, 0, 0, 0, 31, 477, 1, 0, 0, 0, 33, 485, 1, 0, 0, 0, 35, 497, 1, 0, 0, 0, 37, 506, 1, 0, 0, 0, 39, 512, 1, 0, 0, 0, 41, 519, 1, 0, 0, 0, 43, 526, 1, 0, 0, 0, 45, 534, 1, 0, 0, 0, 47, 542, 1, 0, 0, 0, 49, 557, 1, 0, 0, 0, 51, 567, 1, 0, 0, 0, 53, 579, 1, 0, 0, 0, 55, 585, 1, 0, 0, 0, 57, 602, 1, 0, 0, 0, 59, 618, 1, 0, 0, 0, 61, 624, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 630, 1, 0, 0, 0, 67, 632, 1, 0, 0, 0, 69, 634, 1, 0, 0, 0, 71, 637, 1, 0, 0, 0, 73, 639, 1, 0, 0, 0, 75, 648, 1, 0, 0, 0, 77, 650, 1, 0, 0, 0, 79, 655, 1, 0, 0, 0, 81, 657, 1, 0, 0, 0, 83, 662, 1, 0, 0, 0, 85, 693, 1, 0, 0, 0, 87, 696, 1, 0, 0, 0, 89, 742, 1, 0, 0, 0, 91, 744, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 755, 1, 0, 0, 0, 99, 757, 1, 0, 0, 0, 101, 760, 1, 0, 0, 0, 103, 762, 1, 0, 0, 0, 105, 767, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 781, 1, 0, 0, 0, 113, 784, 1, 0, 0, 0, 115, 787, 1, 0, 0, 0, 117, 792, 1, 0, 0, 0, 119, 797, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 803, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 814, 1, 0, 0, 0, 129, 817, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 825, 1, 0, 0, 0, 135, 827, 1, 0, 0, 0, 137, 832, 1, 0, 0, 0, 139, 835, 1, 0, 0, 0, 141, 838, 1, 0, 0, 0, 143, 841, 1, 0, 0, 0, 145, 843, 1, 0, 0, 0, 147, 846, 1, 0, 0, 0, 149, 848, 1, 0, 0, 0, 151, 851, 1, 0, 0, 0, 153, 853, 1, 0, 0, 0, 155, 855, 1, 0, 0, 0, 157, 857, 1, 0, 0, 0, 159, 859, 1, 0, 0, 0, 161, 861, 1, 0, 0, 0, 163, 866, 1, 0, 0, 0, 165, 887, 1, 0, 0, 0, 167, 889, 1, 0, 0, 0, 169, 894, 1, 0, 0, 0, 171, 915, 1, 0, 0, 0, 173, 917, 1, 0, 0, 0, 175, 925, 1, 0, 0, 0, 177, 927, 1, 0, 0, 0, 179, 931, 1, 0, 0, 0, 181, 935, 1, 0, 0, 0, 183, 939, 1, 0, 0, 0, 185, 944, 1, 0, 0, 0, 187, 949, 1, 0, 0, 0, 189, 953, 1, 0, 0, 0, 191, 957, 1, 0, 0, 0, 193, 961, 1, 0, 0, 0, 195, 966, 1, 0, 0, 0, 197, 970, 1, 0, 0, 0, 199, 974, 1, 0, 0, 0, 201, 978, 1, 0, 0, 0, 203, 982, 1, 0, 0, 0, 205, 986, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1001, 1, 0, 0, 0, 211, 1005, 1, 0, 0, 0, 213, 1009, 1, 0, 0, 0, 215, 1013, 1, 0, 0, 0, 217, 1017, 1, 0, 0, 0, 219, 1021, 1, 0, 0, 0, 221, 1025, 1, 0, 0, 0, 223, 1030, 1, 0, 0, 0, 225, 1034, 1, 0, 0, 0, 227, 1038, 1, 0, 0, 0, 229, 1043, 1, 0, 0, 0, 231, 1052, 1, 0, 0, 0, 233, 1073, 1, 0, 0, 0, 235, 1077, 1, 0, 0, 0, 237, 1081, 1, 0, 0, 0, 239, 1085, 1, 0, 0, 0, 241, 1089, 1, 0, 0, 0, 243, 1093, 1, 0, 0, 0, 245, 1098, 1, 0, 0, 0, 247, 1102, 1, 0, 0, 0, 249, 1106, 1, 0, 0, 0, 251, 1110, 1, 0, 0, 0, 253, 1115, 1, 0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1123, 1, 0, 0, 0, 259, 1127, 1, 0, 0, 0, 261, 1131, 1, 0, 0, 0, 263, 1135, 1, 0, 0, 0, 265, 1139, 1, 0, 0, 0, 267, 1144, 1, 0, 0, 0, 269, 1149, 1, 0, 0, 0, 271, 1154, 1, 0, 0, 0, 273, 1161, 1, 0, 0, 0, 275, 1170, 1, 0, 0, 0, 277, 1177, 1, 0, 0, 0, 279, 1181, 1, 0, 0, 0, 281, 1185, 1, 0, 0, 0, 283, 1189, 1, 0, 0, 0, 285, 1193, 1, 0, 0, 0, 287, 1199, 1, 0, 0, 0, 289, 1203, 1, 0, 0, 0, 291, 1207, 1, 0, 0, 0, 293, 1211, 1, 0, 0, 0, 295, 
1215, 1, 0, 0, 0, 297, 1219, 1, 0, 0, 0, 299, 1223, 1, 0, 0, 0, 301, 1228, 1, 0, 0, 0, 303, 1233, 1, 0, 0, 0, 305, 1237, 1, 0, 0, 0, 307, 1241, 1, 0, 0, 0, 309, 1245, 1, 0, 0, 0, 311, 1250, 1, 0, 0, 0, 313, 1254, 1, 0, 0, 0, 315, 1259, 1, 0, 0, 0, 317, 1264, 1, 0, 0, 0, 319, 1268, 1, 0, 0, 0, 321, 1272, 1, 0, 0, 0, 323, 1276, 1, 0, 0, 0, 325, 1280, 1, 0, 0, 0, 327, 1284, 1, 0, 0, 0, 329, 1289, 1, 0, 0, 0, 331, 1294, 1, 0, 0, 0, 333, 1298, 1, 0, 0, 0, 335, 1302, 1, 0, 0, 0, 337, 1306, 1, 0, 0, 0, 339, 1311, 1, 0, 0, 0, 341, 1320, 1, 0, 0, 0, 343, 1324, 1, 0, 0, 0, 345, 1328, 1, 0, 0, 0, 347, 1332, 1, 0, 0, 0, 349, 1336, 1, 0, 0, 0, 351, 1341, 1, 0, 0, 0, 353, 1345, 1, 0, 0, 0, 355, 1349, 1, 0, 0, 0, 357, 1353, 1, 0, 0, 0, 359, 1358, 1, 0, 0, 0, 361, 1362, 1, 0, 0, 0, 363, 1366, 1, 0, 0, 0, 365, 1370, 1, 0, 0, 0, 367, 1374, 1, 0, 0, 0, 369, 1378, 1, 0, 0, 0, 371, 1384, 1, 0, 0, 0, 373, 1388, 1, 0, 0, 0, 375, 1392, 1, 0, 0, 0, 377, 1396, 1, 0, 0, 0, 379, 1400, 1, 0, 0, 0, 381, 1404, 1, 0, 0, 0, 383, 1408, 1, 0, 0, 0, 385, 1413, 1, 0, 0, 0, 387, 1419, 1, 0, 0, 0, 389, 1425, 1, 0, 0, 0, 391, 1429, 1, 0, 0, 0, 393, 1433, 1, 0, 0, 0, 395, 1437, 1, 0, 0, 0, 397, 1443, 1, 0, 0, 0, 399, 1449, 1, 0, 0, 0, 401, 1453, 1, 0, 0, 0, 403, 1457, 1, 0, 0, 0, 405, 1461, 1, 0, 0, 0, 407, 1467, 1, 0, 0, 0, 409, 1473, 1, 0, 0, 0, 411, 1479, 1, 0, 0, 0, 413, 414, 7, 0, 0, 0, 414, 415, 7, 1, 0, 0, 415, 416, 7, 2, 0, 0, 416, 417, 7, 2, 0, 0, 417, 418, 7, 3, 0, 0, 418, 419, 7, 4, 0, 0, 419, 420, 7, 5, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 0, 0, 0, 422, 16, 1, 0, 0, 0, 423, 424, 7, 0, 0, 0, 424, 425, 7, 6, 0, 0, 425, 426, 7, 7, 0, 0, 426, 427, 7, 8, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 1, 1, 0, 429, 18, 1, 0, 0, 0, 430, 431, 7, 3, 0, 0, 431, 432, 7, 9, 0, 0, 432, 433, 7, 6, 0, 0, 433, 434, 7, 1, 0, 0, 434, 435, 7, 4, 0, 0, 435, 436, 7, 10, 0, 0, 436, 437, 1, 0, 0, 0, 437, 438, 6, 2, 2, 0, 438, 20, 1, 0, 0, 0, 439, 440, 7, 3, 0, 0, 440, 441, 7, 11, 0, 0, 441, 442, 7, 12, 0, 0, 442, 443, 7, 13, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 3, 0, 0, 445, 22, 1, 0, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 14, 0, 0, 448, 449, 7, 8, 0, 0, 449, 450, 7, 13, 0, 0, 450, 451, 7, 12, 0, 0, 451, 452, 7, 1, 0, 0, 452, 453, 7, 9, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 4, 3, 0, 455, 24, 1, 0, 0, 0, 456, 457, 7, 15, 0, 0, 457, 458, 7, 6, 0, 0, 458, 459, 7, 7, 0, 0, 459, 460, 7, 16, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 5, 4, 0, 462, 26, 1, 0, 0, 0, 463, 464, 7, 17, 0, 0, 464, 465, 7, 6, 0, 0, 465, 466, 7, 7, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 6, 0, 0, 469, 28, 1, 0, 0, 0, 470, 471, 7, 18, 0, 0, 471, 472, 7, 3, 0, 0, 472, 473, 7, 3, 0, 0, 473, 474, 7, 8, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 7, 1, 0, 476, 30, 1, 0, 0, 0, 477, 478, 7, 13, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 16, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 5, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 8, 0, 0, 484, 32, 1, 0, 0, 0, 485, 486, 7, 16, 0, 0, 486, 487, 7, 11, 0, 0, 487, 488, 5, 95, 0, 0, 488, 489, 7, 3, 0, 0, 489, 490, 7, 14, 0, 0, 490, 491, 7, 8, 0, 0, 491, 492, 7, 12, 0, 0, 492, 493, 7, 9, 0, 0, 493, 494, 7, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 496, 6, 9, 5, 0, 496, 34, 1, 0, 0, 0, 497, 498, 7, 6, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 7, 9, 0, 0, 500, 501, 7, 12, 0, 0, 501, 502, 7, 16, 0, 0, 502, 503, 7, 3, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 10, 6, 0, 505, 36, 1, 0, 0, 0, 506, 507, 7, 6, 0, 0, 507, 508, 7, 7, 0, 0, 508, 509, 7, 19, 0, 0, 509, 510, 1, 0, 0, 0, 510, 511, 6, 11, 0, 0, 511, 38, 1, 0, 0, 0, 512, 513, 7, 
2, 0, 0, 513, 514, 7, 10, 0, 0, 514, 515, 7, 7, 0, 0, 515, 516, 7, 19, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 12, 7, 0, 518, 40, 1, 0, 0, 0, 519, 520, 7, 2, 0, 0, 520, 521, 7, 7, 0, 0, 521, 522, 7, 6, 0, 0, 522, 523, 7, 5, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 13, 0, 0, 525, 42, 1, 0, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 12, 0, 0, 529, 530, 7, 5, 0, 0, 530, 531, 7, 2, 0, 0, 531, 532, 1, 0, 0, 0, 532, 533, 6, 14, 0, 0, 533, 44, 1, 0, 0, 0, 534, 535, 7, 19, 0, 0, 535, 536, 7, 10, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 7, 6, 0, 0, 538, 539, 7, 3, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 15, 0, 0, 541, 46, 1, 0, 0, 0, 542, 543, 4, 16, 0, 0, 543, 544, 7, 1, 0, 0, 544, 545, 7, 9, 0, 0, 545, 546, 7, 13, 0, 0, 546, 547, 7, 1, 0, 0, 547, 548, 7, 9, 0, 0, 548, 549, 7, 3, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 12, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 2, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 16, 0, 0, 556, 48, 1, 0, 0, 0, 557, 558, 4, 17, 1, 0, 558, 559, 7, 13, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 7, 0, 0, 561, 562, 7, 18, 0, 0, 562, 563, 7, 20, 0, 0, 563, 564, 7, 8, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 17, 8, 0, 566, 50, 1, 0, 0, 0, 567, 568, 4, 18, 2, 0, 568, 569, 7, 16, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 7, 5, 0, 0, 571, 572, 7, 6, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 4, 0, 0, 574, 575, 7, 2, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 6, 18, 9, 0, 577, 52, 1, 0, 0, 0, 578, 580, 8, 21, 0, 0, 579, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 584, 6, 19, 0, 0, 584, 54, 1, 0, 0, 0, 585, 586, 5, 47, 0, 0, 586, 587, 5, 47, 0, 0, 587, 591, 1, 0, 0, 0, 588, 590, 8, 22, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 596, 5, 13, 0, 0, 595, 594, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 598, 1, 0, 0, 0, 597, 599, 5, 10, 0, 0, 598, 597, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 6, 20, 10, 0, 601, 56, 1, 0, 0, 0, 602, 603, 5, 47, 0, 0, 603, 604, 5, 42, 0, 0, 604, 609, 1, 0, 0, 0, 605, 608, 3, 57, 21, 0, 606, 608, 9, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 606, 1, 0, 0, 0, 608, 611, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 612, 613, 5, 42, 0, 0, 613, 614, 5, 47, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 21, 10, 0, 616, 58, 1, 0, 0, 0, 617, 619, 7, 23, 0, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 22, 10, 0, 623, 60, 1, 0, 0, 0, 624, 625, 5, 58, 0, 0, 625, 62, 1, 0, 0, 0, 626, 627, 5, 124, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 6, 24, 11, 0, 629, 64, 1, 0, 0, 0, 630, 631, 7, 24, 0, 0, 631, 66, 1, 0, 0, 0, 632, 633, 7, 25, 0, 0, 633, 68, 1, 0, 0, 0, 634, 635, 5, 92, 0, 0, 635, 636, 7, 26, 0, 0, 636, 70, 1, 0, 0, 0, 637, 638, 8, 27, 0, 0, 638, 72, 1, 0, 0, 0, 639, 641, 7, 3, 0, 0, 640, 642, 7, 28, 0, 0, 641, 640, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 644, 1, 0, 0, 0, 643, 645, 3, 65, 25, 0, 644, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 74, 1, 0, 0, 0, 648, 649, 5, 64, 0, 0, 649, 76, 1, 0, 0, 0, 650, 651, 5, 96, 0, 0, 651, 78, 1, 0, 0, 0, 652, 656, 8, 29, 0, 0, 653, 654, 5, 96, 0, 0, 654, 656, 5, 96, 0, 0, 655, 652, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 656, 80, 1, 0, 0, 0, 657, 658, 5, 95, 0, 0, 658, 82, 1, 0, 0, 0, 659, 663, 3, 67, 26, 0, 660, 663, 3, 65, 25, 0, 661, 663, 3, 81, 33, 0, 
662, 659, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 84, 1, 0, 0, 0, 664, 669, 5, 34, 0, 0, 665, 668, 3, 69, 27, 0, 666, 668, 3, 71, 28, 0, 667, 665, 1, 0, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 694, 5, 34, 0, 0, 673, 674, 5, 34, 0, 0, 674, 675, 5, 34, 0, 0, 675, 676, 5, 34, 0, 0, 676, 680, 1, 0, 0, 0, 677, 679, 8, 22, 0, 0, 678, 677, 1, 0, 0, 0, 679, 682, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 683, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 683, 684, 5, 34, 0, 0, 684, 685, 5, 34, 0, 0, 685, 686, 5, 34, 0, 0, 686, 688, 1, 0, 0, 0, 687, 689, 5, 34, 0, 0, 688, 687, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 691, 1, 0, 0, 0, 690, 692, 5, 34, 0, 0, 691, 690, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 694, 1, 0, 0, 0, 693, 664, 1, 0, 0, 0, 693, 673, 1, 0, 0, 0, 694, 86, 1, 0, 0, 0, 695, 697, 3, 65, 25, 0, 696, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 88, 1, 0, 0, 0, 700, 702, 3, 65, 25, 0, 701, 700, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 709, 3, 105, 45, 0, 706, 708, 3, 65, 25, 0, 707, 706, 1, 0, 0, 0, 708, 711, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 743, 1, 0, 0, 0, 711, 709, 1, 0, 0, 0, 712, 714, 3, 105, 45, 0, 713, 715, 3, 65, 25, 0, 714, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 743, 1, 0, 0, 0, 718, 720, 3, 65, 25, 0, 719, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 730, 1, 0, 0, 0, 723, 727, 3, 105, 45, 0, 724, 726, 3, 65, 25, 0, 725, 724, 1, 0, 0, 0, 726, 729, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 731, 1, 0, 0, 0, 729, 727, 1, 0, 0, 0, 730, 723, 1, 0, 0, 0, 730, 731, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 733, 3, 73, 29, 0, 733, 743, 1, 0, 0, 0, 734, 736, 3, 105, 45, 0, 735, 737, 3, 65, 25, 0, 736, 735, 1, 0, 0, 0, 737, 738, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 739, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 741, 3, 73, 29, 0, 741, 743, 1, 0, 0, 0, 742, 701, 1, 0, 0, 0, 742, 712, 1, 0, 0, 0, 742, 719, 1, 0, 0, 0, 742, 734, 1, 0, 0, 0, 743, 90, 1, 0, 0, 0, 744, 745, 7, 30, 0, 0, 745, 746, 7, 31, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 9, 0, 0, 749, 750, 7, 0, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 7, 12, 0, 0, 752, 753, 7, 2, 0, 0, 753, 754, 7, 4, 0, 0, 754, 96, 1, 0, 0, 0, 755, 756, 5, 61, 0, 0, 756, 98, 1, 0, 0, 0, 757, 758, 5, 58, 0, 0, 758, 759, 5, 58, 0, 0, 759, 100, 1, 0, 0, 0, 760, 761, 5, 44, 0, 0, 761, 102, 1, 0, 0, 0, 762, 763, 7, 0, 0, 0, 763, 764, 7, 3, 0, 0, 764, 765, 7, 2, 0, 0, 765, 766, 7, 4, 0, 0, 766, 104, 1, 0, 0, 0, 767, 768, 5, 46, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 12, 0, 0, 771, 772, 7, 13, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 3, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 15, 0, 0, 776, 777, 7, 1, 0, 0, 777, 778, 7, 6, 0, 0, 778, 779, 7, 2, 0, 0, 779, 780, 7, 5, 0, 0, 780, 110, 1, 0, 0, 0, 781, 782, 7, 1, 0, 0, 782, 783, 7, 9, 0, 0, 783, 112, 1, 0, 0, 0, 784, 785, 7, 1, 0, 0, 785, 786, 7, 2, 0, 0, 786, 114, 1, 0, 0, 0, 787, 788, 7, 13, 0, 0, 788, 789, 7, 12, 0, 0, 789, 790, 7, 2, 0, 0, 790, 791, 7, 5, 0, 0, 791, 116, 1, 0, 0, 0, 792, 793, 7, 13, 0, 0, 793, 794, 7, 1, 0, 0, 794, 795, 7, 18, 0, 0, 795, 796, 7, 3, 0, 0, 796, 118, 1, 0, 0, 0, 797, 798, 5, 40, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 7, 0, 0, 801, 802, 7, 5, 0, 0, 
802, 122, 1, 0, 0, 0, 803, 804, 7, 9, 0, 0, 804, 805, 7, 20, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 13, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 20, 0, 0, 810, 811, 7, 13, 0, 0, 811, 812, 7, 13, 0, 0, 812, 813, 7, 2, 0, 0, 813, 126, 1, 0, 0, 0, 814, 815, 7, 7, 0, 0, 815, 816, 7, 6, 0, 0, 816, 128, 1, 0, 0, 0, 817, 818, 5, 63, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 7, 6, 0, 0, 820, 821, 7, 13, 0, 0, 821, 822, 7, 1, 0, 0, 822, 823, 7, 18, 0, 0, 823, 824, 7, 3, 0, 0, 824, 132, 1, 0, 0, 0, 825, 826, 5, 41, 0, 0, 826, 134, 1, 0, 0, 0, 827, 828, 7, 5, 0, 0, 828, 829, 7, 6, 0, 0, 829, 830, 7, 20, 0, 0, 830, 831, 7, 3, 0, 0, 831, 136, 1, 0, 0, 0, 832, 833, 5, 61, 0, 0, 833, 834, 5, 61, 0, 0, 834, 138, 1, 0, 0, 0, 835, 836, 5, 61, 0, 0, 836, 837, 5, 126, 0, 0, 837, 140, 1, 0, 0, 0, 838, 839, 5, 33, 0, 0, 839, 840, 5, 61, 0, 0, 840, 142, 1, 0, 0, 0, 841, 842, 5, 60, 0, 0, 842, 144, 1, 0, 0, 0, 843, 844, 5, 60, 0, 0, 844, 845, 5, 61, 0, 0, 845, 146, 1, 0, 0, 0, 846, 847, 5, 62, 0, 0, 847, 148, 1, 0, 0, 0, 848, 849, 5, 62, 0, 0, 849, 850, 5, 61, 0, 0, 850, 150, 1, 0, 0, 0, 851, 852, 5, 43, 0, 0, 852, 152, 1, 0, 0, 0, 853, 854, 5, 45, 0, 0, 854, 154, 1, 0, 0, 0, 855, 856, 5, 42, 0, 0, 856, 156, 1, 0, 0, 0, 857, 858, 5, 47, 0, 0, 858, 158, 1, 0, 0, 0, 859, 860, 5, 37, 0, 0, 860, 160, 1, 0, 0, 0, 861, 862, 4, 73, 3, 0, 862, 863, 3, 61, 23, 0, 863, 864, 1, 0, 0, 0, 864, 865, 6, 73, 12, 0, 865, 162, 1, 0, 0, 0, 866, 867, 3, 45, 15, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 74, 13, 0, 869, 164, 1, 0, 0, 0, 870, 873, 3, 129, 57, 0, 871, 874, 3, 67, 26, 0, 872, 874, 3, 81, 33, 0, 873, 871, 1, 0, 0, 0, 873, 872, 1, 0, 0, 0, 874, 878, 1, 0, 0, 0, 875, 877, 3, 83, 34, 0, 876, 875, 1, 0, 0, 0, 877, 880, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 888, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 883, 3, 129, 57, 0, 882, 884, 3, 65, 25, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 870, 1, 0, 0, 0, 887, 881, 1, 0, 0, 0, 888, 166, 1, 0, 0, 0, 889, 890, 5, 91, 0, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 76, 0, 0, 892, 893, 6, 76, 0, 0, 893, 168, 1, 0, 0, 0, 894, 895, 5, 93, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 77, 11, 0, 897, 898, 6, 77, 11, 0, 898, 170, 1, 0, 0, 0, 899, 903, 3, 67, 26, 0, 900, 902, 3, 83, 34, 0, 901, 900, 1, 0, 0, 0, 902, 905, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 916, 1, 0, 0, 0, 905, 903, 1, 0, 0, 0, 906, 909, 3, 81, 33, 0, 907, 909, 3, 75, 30, 0, 908, 906, 1, 0, 0, 0, 908, 907, 1, 0, 0, 0, 909, 911, 1, 0, 0, 0, 910, 912, 3, 83, 34, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 899, 1, 0, 0, 0, 915, 908, 1, 0, 0, 0, 916, 172, 1, 0, 0, 0, 917, 919, 3, 77, 31, 0, 918, 920, 3, 79, 32, 0, 919, 918, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 919, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 3, 77, 31, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 173, 79, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 55, 20, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 57, 21, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 10, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 59, 22, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 10, 0, 938, 182, 1, 0, 0, 0, 939, 940, 3, 167, 76, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 84, 14, 0, 942, 943, 6, 84, 15, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 63, 24, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 16, 0, 947, 948, 6, 85, 11, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 59, 
22, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 55, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 87, 10, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 57, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 10, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 63, 24, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 16, 0, 964, 965, 6, 89, 11, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 167, 76, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 14, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 169, 77, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 61, 23, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 12, 0, 977, 200, 1, 0, 0, 0, 978, 979, 3, 101, 43, 0, 979, 980, 1, 0, 0, 0, 980, 981, 6, 93, 18, 0, 981, 202, 1, 0, 0, 0, 982, 983, 3, 97, 41, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 94, 19, 0, 985, 204, 1, 0, 0, 0, 986, 987, 7, 16, 0, 0, 987, 988, 7, 3, 0, 0, 988, 989, 7, 5, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 0, 0, 0, 991, 992, 7, 12, 0, 0, 992, 993, 7, 5, 0, 0, 993, 994, 7, 12, 0, 0, 994, 206, 1, 0, 0, 0, 995, 999, 8, 32, 0, 0, 996, 997, 5, 47, 0, 0, 997, 999, 8, 33, 0, 0, 998, 995, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 999, 208, 1, 0, 0, 0, 1000, 1002, 3, 207, 96, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 209, 97, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 20, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 85, 35, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 21, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 55, 20, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 57, 21, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 10, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 59, 22, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 10, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 63, 24, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 16, 0, 1028, 1029, 6, 103, 11, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 105, 45, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 101, 43, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 18, 0, 1037, 226, 1, 0, 0, 0, 1038, 1039, 4, 106, 4, 0, 1039, 1040, 3, 129, 57, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 23, 0, 1042, 228, 1, 0, 0, 0, 1043, 1044, 4, 107, 5, 0, 1044, 1045, 3, 165, 75, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 107, 24, 0, 1047, 230, 1, 0, 0, 0, 1048, 1053, 3, 67, 26, 0, 1049, 1053, 3, 65, 25, 0, 1050, 1053, 3, 81, 33, 0, 1051, 1053, 3, 155, 70, 0, 1052, 1048, 1, 0, 0, 0, 1052, 1049, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 232, 1, 0, 0, 0, 1054, 1057, 3, 67, 26, 0, 1055, 1057, 3, 155, 70, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1061, 1, 0, 0, 0, 1058, 1060, 3, 231, 108, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1063, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1074, 1, 0, 0, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 3, 81, 33, 0, 1065, 1067, 3, 75, 30, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1070, 3, 231, 108, 0, 1069, 1068, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1074, 1, 0, 0, 0, 1073, 1056, 1, 0, 0, 0, 1073, 1066, 1, 0, 0, 0, 1074, 234, 1, 0, 0, 0, 1075, 1078, 3, 233, 109, 0, 1076, 1078, 3, 173, 79, 0, 1077, 1075, 1, 0, 0, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 55, 20, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 
1, 0, 0, 0, 1085, 1086, 3, 57, 21, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 59, 22, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 10, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 63, 24, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 16, 0, 1096, 1097, 6, 114, 11, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 97, 41, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 19, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 101, 43, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 18, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 105, 45, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 117, 22, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 4, 118, 6, 0, 1111, 1112, 3, 129, 57, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 118, 23, 0, 1114, 252, 1, 0, 0, 0, 1115, 1116, 4, 119, 7, 0, 1116, 1117, 3, 165, 75, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 119, 24, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 7, 12, 0, 0, 1121, 1122, 7, 2, 0, 0, 1122, 256, 1, 0, 0, 0, 1123, 1124, 3, 235, 110, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 121, 25, 0, 1126, 258, 1, 0, 0, 0, 1127, 1128, 3, 55, 20, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 122, 10, 0, 1130, 260, 1, 0, 0, 0, 1131, 1132, 3, 57, 21, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 123, 10, 0, 1134, 262, 1, 0, 0, 0, 1135, 1136, 3, 59, 22, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 124, 10, 0, 1138, 264, 1, 0, 0, 0, 1139, 1140, 3, 63, 24, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 125, 16, 0, 1142, 1143, 6, 125, 11, 0, 1143, 266, 1, 0, 0, 0, 1144, 1145, 3, 167, 76, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 14, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 7, 7, 0, 0, 1150, 1151, 7, 9, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 7, 19, 0, 0, 1155, 1156, 7, 1, 0, 0, 1156, 1157, 7, 5, 0, 0, 1157, 1158, 7, 10, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 128, 27, 0, 1160, 272, 1, 0, 0, 0, 1161, 1162, 8, 34, 0, 0, 1162, 274, 1, 0, 0, 0, 1163, 1165, 3, 273, 129, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 3, 61, 23, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1164, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1173, 1, 0, 0, 0, 1172, 1174, 3, 273, 129, 0, 1173, 1172, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1175, 1176, 1, 0, 0, 0, 1176, 276, 1, 0, 0, 0, 1177, 1178, 3, 275, 130, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 131, 28, 0, 1180, 278, 1, 0, 0, 0, 1181, 1182, 3, 55, 20, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 132, 10, 0, 1184, 280, 1, 0, 0, 0, 1185, 1186, 3, 57, 21, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 133, 10, 0, 1188, 282, 1, 0, 0, 0, 1189, 1190, 3, 59, 22, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 134, 10, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 63, 24, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 16, 0, 1196, 1197, 6, 135, 11, 0, 1197, 1198, 6, 135, 11, 0, 1198, 286, 1, 0, 0, 0, 1199, 1200, 3, 97, 41, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 136, 19, 0, 1202, 288, 1, 0, 0, 0, 1203, 1204, 3, 101, 43, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 137, 18, 0, 1206, 290, 1, 0, 0, 0, 1207, 1208, 3, 105, 45, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 138, 22, 0, 1210, 292, 1, 0, 0, 0, 1211, 1212, 3, 271, 128, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 139, 29, 0, 1214, 294, 1, 0, 0, 0, 1215, 1216, 3, 235, 110, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 140, 25, 0, 1218, 296, 1, 0, 0, 0, 1219, 1220, 3, 175, 80, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 141, 30, 0, 1222, 298, 1, 0, 0, 0, 1223, 
1224, 4, 142, 8, 0, 1224, 1225, 3, 129, 57, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 142, 23, 0, 1227, 300, 1, 0, 0, 0, 1228, 1229, 4, 143, 9, 0, 1229, 1230, 3, 165, 75, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 143, 24, 0, 1232, 302, 1, 0, 0, 0, 1233, 1234, 3, 55, 20, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 144, 10, 0, 1236, 304, 1, 0, 0, 0, 1237, 1238, 3, 57, 21, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 145, 10, 0, 1240, 306, 1, 0, 0, 0, 1241, 1242, 3, 59, 22, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 146, 10, 0, 1244, 308, 1, 0, 0, 0, 1245, 1246, 3, 63, 24, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 16, 0, 1248, 1249, 6, 147, 11, 0, 1249, 310, 1, 0, 0, 0, 1250, 1251, 3, 105, 45, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 148, 22, 0, 1253, 312, 1, 0, 0, 0, 1254, 1255, 4, 149, 10, 0, 1255, 1256, 3, 129, 57, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 149, 23, 0, 1258, 314, 1, 0, 0, 0, 1259, 1260, 4, 150, 11, 0, 1260, 1261, 3, 165, 75, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 150, 24, 0, 1263, 316, 1, 0, 0, 0, 1264, 1265, 3, 175, 80, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 151, 30, 0, 1267, 318, 1, 0, 0, 0, 1268, 1269, 3, 171, 78, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 152, 31, 0, 1271, 320, 1, 0, 0, 0, 1272, 1273, 3, 55, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 153, 10, 0, 1275, 322, 1, 0, 0, 0, 1276, 1277, 3, 57, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 154, 10, 0, 1279, 324, 1, 0, 0, 0, 1280, 1281, 3, 59, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 155, 10, 0, 1283, 326, 1, 0, 0, 0, 1284, 1285, 3, 63, 24, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 156, 16, 0, 1287, 1288, 6, 156, 11, 0, 1288, 328, 1, 0, 0, 0, 1289, 1290, 7, 1, 0, 0, 1290, 1291, 7, 9, 0, 0, 1291, 1292, 7, 15, 0, 0, 1292, 1293, 7, 7, 0, 0, 1293, 330, 1, 0, 0, 0, 1294, 1295, 3, 55, 20, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 158, 10, 0, 1297, 332, 1, 0, 0, 0, 1298, 1299, 3, 57, 21, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 159, 10, 0, 1301, 334, 1, 0, 0, 0, 1302, 1303, 3, 59, 22, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 160, 10, 0, 1305, 336, 1, 0, 0, 0, 1306, 1307, 3, 169, 77, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 161, 17, 0, 1309, 1310, 6, 161, 11, 0, 1310, 338, 1, 0, 0, 0, 1311, 1312, 3, 61, 23, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 162, 12, 0, 1314, 340, 1, 0, 0, 0, 1315, 1321, 3, 75, 30, 0, 1316, 1321, 3, 65, 25, 0, 1317, 1321, 3, 105, 45, 0, 1318, 1321, 3, 67, 26, 0, 1319, 1321, 3, 81, 33, 0, 1320, 1315, 1, 0, 0, 0, 1320, 1316, 1, 0, 0, 0, 1320, 1317, 1, 0, 0, 0, 1320, 1318, 1, 0, 0, 0, 1320, 1319, 1, 0, 0, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1320, 1, 0, 0, 0, 1322, 1323, 1, 0, 0, 0, 1323, 342, 1, 0, 0, 0, 1324, 1325, 3, 55, 20, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 164, 10, 0, 1327, 344, 1, 0, 0, 0, 1328, 1329, 3, 57, 21, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 165, 10, 0, 1331, 346, 1, 0, 0, 0, 1332, 1333, 3, 59, 22, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 166, 10, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 63, 24, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 16, 0, 1339, 1340, 6, 167, 11, 0, 1340, 350, 1, 0, 0, 0, 1341, 1342, 3, 61, 23, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 168, 12, 0, 1344, 352, 1, 0, 0, 0, 1345, 1346, 3, 101, 43, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 169, 18, 0, 1348, 354, 1, 0, 0, 0, 1349, 1350, 3, 105, 45, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 170, 22, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 269, 127, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 32, 0, 1356, 1357, 6, 171, 33, 0, 1357, 358, 1, 0, 0, 0, 1358, 1359, 3, 209, 97, 0, 1359, 1360, 1, 
0, 0, 0, 1360, 1361, 6, 172, 20, 0, 1361, 360, 1, 0, 0, 0, 1362, 1363, 3, 85, 35, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 173, 21, 0, 1365, 362, 1, 0, 0, 0, 1366, 1367, 3, 55, 20, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 174, 10, 0, 1369, 364, 1, 0, 0, 0, 1370, 1371, 3, 57, 21, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 175, 10, 0, 1373, 366, 1, 0, 0, 0, 1374, 1375, 3, 59, 22, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 176, 10, 0, 1377, 368, 1, 0, 0, 0, 1378, 1379, 3, 63, 24, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 177, 16, 0, 1381, 1382, 6, 177, 11, 0, 1382, 1383, 6, 177, 11, 0, 1383, 370, 1, 0, 0, 0, 1384, 1385, 3, 101, 43, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 178, 18, 0, 1387, 372, 1, 0, 0, 0, 1388, 1389, 3, 105, 45, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 179, 22, 0, 1391, 374, 1, 0, 0, 0, 1392, 1393, 3, 235, 110, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 180, 25, 0, 1395, 376, 1, 0, 0, 0, 1396, 1397, 3, 55, 20, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 181, 10, 0, 1399, 378, 1, 0, 0, 0, 1400, 1401, 3, 57, 21, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 182, 10, 0, 1403, 380, 1, 0, 0, 0, 1404, 1405, 3, 59, 22, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 183, 10, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 63, 24, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 16, 0, 1411, 1412, 6, 184, 11, 0, 1412, 384, 1, 0, 0, 0, 1413, 1414, 3, 209, 97, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 185, 20, 0, 1416, 1417, 6, 185, 11, 0, 1417, 1418, 6, 185, 34, 0, 1418, 386, 1, 0, 0, 0, 1419, 1420, 3, 85, 35, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 186, 21, 0, 1422, 1423, 6, 186, 11, 0, 1423, 1424, 6, 186, 34, 0, 1424, 388, 1, 0, 0, 0, 1425, 1426, 3, 55, 20, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 187, 10, 0, 1428, 390, 1, 0, 0, 0, 1429, 1430, 3, 57, 21, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 188, 10, 0, 1432, 392, 1, 0, 0, 0, 1433, 1434, 3, 59, 22, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 189, 10, 0, 1436, 394, 1, 0, 0, 0, 1437, 1438, 3, 61, 23, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 190, 12, 0, 1440, 1441, 6, 190, 11, 0, 1441, 1442, 6, 190, 9, 0, 1442, 396, 1, 0, 0, 0, 1443, 1444, 3, 101, 43, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 191, 18, 0, 1446, 1447, 6, 191, 11, 0, 1447, 1448, 6, 191, 9, 0, 1448, 398, 1, 0, 0, 0, 1449, 1450, 3, 55, 20, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 192, 10, 0, 1452, 400, 1, 0, 0, 0, 1453, 1454, 3, 57, 21, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 193, 10, 0, 1456, 402, 1, 0, 0, 0, 1457, 1458, 3, 59, 22, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 194, 10, 0, 1460, 404, 1, 0, 0, 0, 1461, 1462, 3, 175, 80, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 195, 11, 0, 1464, 1465, 6, 195, 0, 0, 1465, 1466, 6, 195, 30, 0, 1466, 406, 1, 0, 0, 0, 1467, 1468, 3, 171, 78, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 196, 11, 0, 1470, 1471, 6, 196, 0, 0, 1471, 1472, 6, 196, 31, 0, 1472, 408, 1, 0, 0, 0, 1473, 1474, 3, 91, 38, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 197, 11, 0, 1476, 1477, 6, 197, 0, 0, 1477, 1478, 6, 197, 35, 0, 1478, 410, 1, 0, 0, 0, 1479, 1480, 3, 63, 24, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 198, 16, 0, 1482, 1483, 6, 198, 11, 0, 1483, 412, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 581, 591, 595, 598, 607, 609, 620, 641, 646, 655, 662, 667, 669, 680, 688, 691, 693, 698, 703, 709, 716, 721, 727, 730, 738, 742, 873, 878, 885, 887, 903, 908, 913, 915, 921, 998, 1003, 1052, 1056, 1061, 1066, 1071, 1073, 1077, 1079, 1166, 1170, 1175, 1320, 1322, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 
13, 0, 0, 1, 0, 4, 0, 0, 7, 24, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 48, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file +[4, 0, 128, 1608, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 
[... remainder of the regenerated serialized ATN table elided: machine-generated ANTLR state-transition data with no human-readable content ...]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
index f10881fcf0692..915264f21910f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
@@ -8,16 +8,14 @@
  * 2.0.
  */

+import org.antlr.v4.runtime.Lexer;
 import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.RuleContext;
-import org.antlr.v4.runtime.RuntimeMetaData;
-import org.antlr.v4.runtime.Vocabulary;
-import org.antlr.v4.runtime.VocabularyImpl;
-import org.antlr.v4.runtime.atn.ATN;
-import org.antlr.v4.runtime.atn.ATNDeserializer;
-import org.antlr.v4.runtime.atn.LexerATNSimulator;
-import org.antlr.v4.runtime.atn.PredictionContextCache;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.TokenStream;
+import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.atn.*;
 import org.antlr.v4.runtime.dfa.DFA;
+import org.antlr.v4.runtime.misc.*;

 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"})
 public class EsqlBaseLexer extends LexerConfig {
@@ -27,90 +25,96 @@ public class EsqlBaseLexer extends LexerConfig {
   protected static final PredictionContextCache _sharedContextCache =
     new PredictionContextCache();
   public static final int
-    DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8,
-    LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15,
-    WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20,
-    LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26,
-    INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32,
-    CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40,
-    LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49,
-    RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58,
-    PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64,
-    OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68,
-    EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72,
-    EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76,
-    FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80,
-    PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83,
-    AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87,
-    ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92,
-    ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95,
-    ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98,
-    MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102,
-    SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106,
-    SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109,
-    LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112,
-    LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115,
-    METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118,
-    CLOSING_METRICS_WS=119;
+    DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8,
+    LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15,
+    WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20,
+    DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24,
+    UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29,
+    PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34,
+    AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42,
+    FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51,
+    OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60,
+    LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68,
+    NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71,
+    UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75,
+    EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79,
+    METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83,
+    FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87,
+    PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91,
+    RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96,
+    ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99,
+    ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102,
+    MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106,
+    SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110,
+    SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113,
+    LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116,
+    LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120,
+    JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124,
+    METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127,
+    CLOSING_METRICS_WS=128;
   public static final int
-    EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5,
-    ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10,
-    LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14;
+    EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5,
+    ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10,
+    LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15;
   public static String[] channelNames = {
     "DEFAULT_TOKEN_CHANNEL", "HIDDEN"
   };
   public static String[] modeNames = {
-    "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE",
-    "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE",
-    "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE"
+    "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE",
+    "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE",
+    "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "JOIN_MODE", "METRICS_MODE",
+    "CLOSING_METRICS_MODE"
   };

   private static String[] makeRuleNames() {
     return new String[] {
-      "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP",
-      "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE",
-      "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT",
-      "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE",
-      "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK",
-      "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL",
-      "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA",
-      "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT",
-      "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ",
-      "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH",
-      "PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM",
-      "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID",
-      "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS",
-      "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
-      "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET",
-      "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART",
-      "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT",
-      "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA",
-      "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN",
-      "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
-      "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT",
-      "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN",
-      "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE",
-      "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME",
-      "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT",
-      "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA",
-      "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER",
-      "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT",
-      "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE",
-      "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM",
-      "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT",
-      "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT",
-      "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON",
-      "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS",
-      "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON",
-      "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT",
-      "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA",
-      "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT",
-      "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE",
-      "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT",
-      "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA",
-      "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT",
-      "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER",
+      "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP",
+      "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE",
+      "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL",
+      "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD",
+      "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT",
+      "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND",
+      "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING",
+      "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
+      "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE",
+      "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ",
+      "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
+      "SLASH", "PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM",
+      "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID",
+      "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS",
+      "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
+      "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET",
+      "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART",
+      "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT",
+      "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA",
+      "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN",
+      "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
+      "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT",
+      "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN",
+      "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE",
+      "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME",
+      "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT",
+      "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA",
+      "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER",
+      "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT",
+      "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE",
+      "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM",
+      "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT",
+      "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT",
+      "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON",
+      "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS",
+      "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON",
+      "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT",
+      "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA",
+      "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT",
+      "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN",
+      "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER",
+      "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE",
+      "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT",
+      "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA",
+      "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT",
+      "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER",
       "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE"
     };
   }
@@ -118,46 +122,50 @@ private static String[] makeRuleNames() {

   private static String[] makeLiteralNames() {
     return new String[] {
-      null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
-      "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'",
-      "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null,
-      "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'",
-      "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'",
-      "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'",
-      "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='",
-      "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null,
-      null, null, null, null, null, "'metadata'", null, null, null, null, null,
-      null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null,
-      null, null, null, null, null, null, null, null, "'info'"
+      null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
+      "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'",
+      "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null,
+      null, null, null, null, null, "':'", "'|'", null, null, null, "'by'",
+      "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'",
+      "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'",
+      "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'",
+      "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null,
+      "']'", null, null, null, null, null, null, null, null, "'metadata'",
+      null, null, null, null, null, null, null, null, "'as'", null, null, null,
+      "'on'", "'with'", null, null, null, null, null, null, null, null, null,
+      null, "'info'", null, null, null, null, null, null, null, null, null,
+      null, null, null, null, "'USING'"
     };
   }
   private static final String[] _LITERAL_NAMES = makeLiteralNames();
   private static String[] makeSymbolicNames() {
     return new String[] {
-      null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
-      "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS",
-      "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD",
-      "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING",
-      "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
-      "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE",
-      "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ",
-      "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
-      "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET",
-      "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT",
-      "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT",
-      "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT",
-      "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
-      "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT",
-      "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT",
-      "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT",
-      "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT",
-      "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT",
-      "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT",
-      "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT",
-      "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT",
-      "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT",
-      "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT",
-      "CLOSING_METRICS_WS"
+      null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
+      "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS",
+      "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN",
+      "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP",
+      "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE",
+      "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC",
+      "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN",
+      "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE",
+      "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
+      "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM",
+      "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER",
+      "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS",
+      "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE",
+      "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN",
+      "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS",
+      "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON",
+      "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT",
+      "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT",
+      "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT",
+      "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT",
+      "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT",
+      "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS",
+      "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS",
+      "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT",
+      "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT",
+      "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS"
     };
   }
   private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
@@ -228,23 +236,33 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
       return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex);
     case 18:
       return DEV_METRICS_sempred((RuleContext)_localctx, predIndex);
-    case 73:
+    case 19:
+      return DEV_JOIN_sempred((RuleContext)_localctx, predIndex);
+    case 20:
+      return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex);
+    case 21:
+      return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex);
+    case 22:
+      return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex);
+    case 23:
+      return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex);
+    case 78:
       return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex);
-    case 106:
+    case 111:
       return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 107:
+    case 112:
       return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 118:
+    case 123:
      return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 119:
+    case 124:
       return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 142:
+    case 147:
       return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 143:
+    case 148:
       return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 149:
+    case 154:
       return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex);
-    case 150:
+    case 155:
       return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex);
     }
     return true;
@@ -270,1034 +288,1153 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) {
     }
     return true;
   }
-  private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 3:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_JOIN_FULL_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 4:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_JOIN_LEFT_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 5:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) {
    switch (predIndex) {
     case 6:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 7:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 8:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 9:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 10:
       return this.isDevVersion();
     }
     return true;
   }
-  private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) {
+  private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) {
     switch (predIndex) {
     case 11:
       return this.isDevVersion();
     }
     return true;
   }
+
private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 12: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 13: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 14: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 15: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 16: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000w\u05cc\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ - "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ - "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ - "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ - "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ - "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ - "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ - "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ - "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ - "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ - "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ - "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ - "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ - "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ - "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ - "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ - "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ - "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ - "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ - "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ - "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ - "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ - "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ - "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ - "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ - "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ - "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ - "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ - "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ - 
"}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ - "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ - "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ - "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ - "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ - "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ - "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ - "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ - "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+ - "\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002"+ - "\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002"+ - "\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002"+ - "\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002"+ - "\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002"+ - "\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002"+ - "\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0002"+ - "\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007\u00b0\u0002"+ - "\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007\u00b3\u0002"+ - "\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6\u0002"+ - "\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9\u0002"+ - "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ - "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ - "\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0002"+ - "\u00c6\u0007\u00c6\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0004\u0000\u0080\u0648\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + 
";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007"+ + "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+ + "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+ + "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+ + "T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007"+ + "Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007"+ + "^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007"+ + "c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007"+ + "h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007"+ + "m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007"+ + "r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007"+ + "w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007"+ + "|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007"+ + "\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007"+ + "\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007"+ + "\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007"+ + "\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007"+ + "\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007"+ + "\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007"+ + "\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007"+ + "\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007"+ + "\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007"+ + "\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007"+ + "\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007"+ + "\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007"+ + "\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007"+ + "\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007"+ + "\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007"+ + "\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007"+ + "\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007"+ + "\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007"+ + "\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007"+ + "\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007"+ + "\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007"+ + "\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007"+ + "\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007"+ + "\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7\u0002\u00c8\u0007"+ + "\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca\u0002\u00cb\u0007"+ + "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ + "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ + "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ + "\u00d4\u0002\u00d5\u0007\u00d5\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - 
"\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013\u0244\b\u0013\u000b\u0013"+ - "\f\u0013\u0245\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u024e\b\u0014\n\u0014\f\u0014\u0251\t\u0014\u0001"+ - "\u0014\u0003\u0014\u0254\b\u0014\u0001\u0014\u0003\u0014\u0257\b\u0014"+ - "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0005\u0015\u0260\b\u0015\n\u0015\f\u0015\u0263\t\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004"+ - "\u0016\u026b\b\u0016\u000b\u0016\f\u0016\u026c\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0003\u001d"+ - "\u0282\b\u001d\u0001\u001d\u0004\u001d\u0285\b\u001d\u000b\u001d\f\u001d"+ - "\u0286\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ - " \u0003 \u0290\b \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u0297"+ - "\b\"\u0001#\u0001#\u0001#\u0005#\u029c\b#\n#\f#\u029f\t#\u0001#\u0001"+ - "#\u0001#\u0001#\u0001#\u0001#\u0005#\u02a7\b#\n#\f#\u02aa\t#\u0001#\u0001"+ - 
"#\u0001#\u0001#\u0001#\u0003#\u02b1\b#\u0001#\u0003#\u02b4\b#\u0003#\u02b6"+ - "\b#\u0001$\u0004$\u02b9\b$\u000b$\f$\u02ba\u0001%\u0004%\u02be\b%\u000b"+ - "%\f%\u02bf\u0001%\u0001%\u0005%\u02c4\b%\n%\f%\u02c7\t%\u0001%\u0001%"+ - "\u0004%\u02cb\b%\u000b%\f%\u02cc\u0001%\u0004%\u02d0\b%\u000b%\f%\u02d1"+ - "\u0001%\u0001%\u0005%\u02d6\b%\n%\f%\u02d9\t%\u0003%\u02db\b%\u0001%\u0001"+ - "%\u0001%\u0001%\u0004%\u02e1\b%\u000b%\f%\u02e2\u0001%\u0001%\u0003%\u02e7"+ - "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001"+ - "(\u0001(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ - "0\u00010\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001"+ - "@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ - "I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ - "K\u0003K\u036a\bK\u0001K\u0005K\u036d\bK\nK\fK\u0370\tK\u0001K\u0001K"+ - "\u0004K\u0374\bK\u000bK\fK\u0375\u0003K\u0378\bK\u0001L\u0001L\u0001L"+ - "\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005"+ - "N\u0386\bN\nN\fN\u0389\tN\u0001N\u0001N\u0003N\u038d\bN\u0001N\u0004N"+ - "\u0390\bN\u000bN\fN\u0391\u0003N\u0394\bN\u0001O\u0001O\u0004O\u0398\b"+ - "O\u000bO\fO\u0399\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001"+ - "Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ - "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001U\u0001V\u0001"+ - "V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ - "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ - "]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0003`\u03e7\b`\u0001"+ - "a\u0004a\u03ea\ba\u000ba\fa\u03eb\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001"+ - "h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001"+ - "j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001"+ - "l\u0001l\u0003l\u041d\bl\u0001m\u0001m\u0003m\u0421\bm\u0001m\u0005m\u0424"+ - "\bm\nm\fm\u0427\tm\u0001m\u0001m\u0003m\u042b\bm\u0001m\u0004m\u042e\b"+ - "m\u000bm\fm\u042f\u0003m\u0432\bm\u0001n\u0001n\u0004n\u0436\bn\u000b"+ - "n\fn\u0437\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001"+ - "q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001s\u0001"+ - "s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001"+ - "u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001"+ - 
"}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ - "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0001\u0081\u0001\u0082\u0004\u0082\u048d\b\u0082\u000b\u0082\f"+ - "\u0082\u048e\u0001\u0082\u0001\u0082\u0003\u0082\u0493\b\u0082\u0001\u0082"+ - "\u0004\u0082\u0496\b\u0082\u000b\u0082\f\u0082\u0497\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ - "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ - "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ - "\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0004\u00a3\u0529\b\u00a3\u000b"+ - "\u00a3\f\u00a3\u052a\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001"+ + "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + 
"\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0004\u0018\u0290\b\u0018\u000b\u0018\f\u0018\u0291"+ + "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0005\u0019\u029a\b\u0019\n\u0019\f\u0019\u029d\t\u0019\u0001\u0019\u0003"+ + "\u0019\u02a0\b\u0019\u0001\u0019\u0003\u0019\u02a3\b\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0005\u001a\u02ac\b\u001a\n\u001a\f\u001a\u02af\t\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b7"+ + "\b\u001b\u000b\u001b\f\u001b\u02b8\u0001\u001b\u0001\u001b\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e"+ + "\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001!\u0001"+ + "!\u0001\"\u0001\"\u0003\"\u02ce\b\"\u0001\"\u0004\"\u02d1\b\"\u000b\""+ + "\f\"\u02d2\u0001#\u0001#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u02dc"+ + "\b%\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0003\'\u02e3\b\'\u0001(\u0001"+ + "(\u0001(\u0005(\u02e8\b(\n(\f(\u02eb\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ + "(\u0001(\u0005(\u02f3\b(\n(\f(\u02f6\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ + "(\u0003(\u02fd\b(\u0001(\u0003(\u0300\b(\u0003(\u0302\b(\u0001)\u0004"+ + ")\u0305\b)\u000b)\f)\u0306\u0001*\u0004*\u030a\b*\u000b*\f*\u030b\u0001"+ + "*\u0001*\u0005*\u0310\b*\n*\f*\u0313\t*\u0001*\u0001*\u0004*\u0317\b*"+ + "\u000b*\f*\u0318\u0001*\u0004*\u031c\b*\u000b*\f*\u031d\u0001*\u0001*"+ + "\u0005*\u0322\b*\n*\f*\u0325\t*\u0003*\u0327\b*\u0001*\u0001*\u0001*\u0001"+ + "*\u0004*\u032d\b*\u000b*\f*\u032e\u0001*\u0001*\u0003*\u0333\b*\u0001"+ + "+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001"+ + "-\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ + "1\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u0001"+ + "3\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ + "8\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001;\u0001"+ + ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001"+ + 
"B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001"+ + "F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ + "J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001N\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0003P\u03b6"+ + "\bP\u0001P\u0005P\u03b9\bP\nP\fP\u03bc\tP\u0001P\u0001P\u0004P\u03c0\b"+ + "P\u000bP\fP\u03c1\u0003P\u03c4\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0005S\u03d2\bS\nS\fS\u03d5"+ + "\tS\u0001S\u0001S\u0003S\u03d9\bS\u0001S\u0004S\u03dc\bS\u000bS\fS\u03dd"+ + "\u0003S\u03e0\bS\u0001T\u0001T\u0004T\u03e4\bT\u000bT\fT\u03e5\u0001T"+ + "\u0001T\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ + "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ + "\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001"+ + "`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ + "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001"+ + "d\u0001d\u0001e\u0001e\u0001e\u0003e\u0433\be\u0001f\u0004f\u0436\bf\u000b"+ + "f\ff\u0437\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ + "k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001"+ + "p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0003q\u0469"+ + "\bq\u0001r\u0001r\u0003r\u046d\br\u0001r\u0005r\u0470\br\nr\fr\u0473\t"+ + "r\u0001r\u0001r\u0003r\u0477\br\u0001r\u0004r\u047a\br\u000br\fr\u047b"+ + "\u0003r\u047e\br\u0001s\u0001s\u0004s\u0482\bs\u000bs\fs\u0483\u0001t"+ + "\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001"+ + "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ + "x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ + "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083"+ + "\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ + "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0087\u0004\u0087\u04d9\b\u0087"+ + "\u000b\u0087\f\u0087\u04da\u0001\u0087\u0001\u0087\u0003\u0087\u04df\b"+ + "\u0087\u0001\u0087\u0004\u0087\u04e2\b\u0087\u000b\u0087\f\u0087\u04e3"+ + "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089"+ + "\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a"+ + "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c"+ + "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ + "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ + "\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ + "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ + 
"\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094"+ + "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097"+ + "\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098"+ + "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ + "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c"+ + "\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e"+ + "\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f"+ + "\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1"+ + "\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2"+ + "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6"+ + "\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0004\u00a8"+ + "\u0575\b\u00a8\u000b\u00a8\f\u00a8\u0576\u0001\u00a9\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ + "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ + "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001"+ + "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ + "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ + "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001"+ - "\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ - "\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001"+ - "\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0002"+ - "\u0261\u02a8\u0000\u00c7\u000f\u0001\u0011\u0002\u0013\u0003\u0015\u0004"+ - "\u0017\u0005\u0019\u0006\u001b\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r"+ - 
")\u000e+\u000f-\u0010/\u00111\u00123\u00135\u00147\u00159\u0016;\u0017"+ - "=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000"+ - "Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001fa c!e\"g#i$k%"+ - "m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d"+ - "6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1"+ - "\u0000\u00a3\u0000\u00a5@\u00a7A\u00a9B\u00abC\u00ad\u0000\u00afD\u00b1"+ - "E\u00b3F\u00b5G\u00b7\u0000\u00b9\u0000\u00bbH\u00bdI\u00bfJ\u00c1\u0000"+ - "\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cdK\u00cf"+ - "\u0000\u00d1L\u00d3\u0000\u00d5\u0000\u00d7M\u00d9N\u00dbO\u00dd\u0000"+ - "\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000\u00e7\u0000\u00e9\u0000"+ - "\u00ebP\u00edQ\u00efR\u00f1S\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9"+ - "\u0000\u00fb\u0000\u00fd\u0000\u00ffT\u0101\u0000\u0103U\u0105V\u0107"+ - "W\u0109\u0000\u010b\u0000\u010dX\u010fY\u0111\u0000\u0113Z\u0115\u0000"+ - "\u0117[\u0119\\\u011b]\u011d\u0000\u011f\u0000\u0121\u0000\u0123\u0000"+ - "\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d\u0000\u012f^\u0131"+ - "_\u0133`\u0135\u0000\u0137\u0000\u0139\u0000\u013b\u0000\u013d\u0000\u013f"+ - "\u0000\u0141a\u0143b\u0145c\u0147\u0000\u0149d\u014be\u014df\u014fg\u0151"+ - "\u0000\u0153\u0000\u0155h\u0157i\u0159j\u015bk\u015d\u0000\u015f\u0000"+ - "\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169\u0000\u016bl\u016d"+ - "m\u016fn\u0171\u0000\u0173\u0000\u0175\u0000\u0177\u0000\u0179o\u017b"+ - "p\u017dq\u017f\u0000\u0181\u0000\u0183\u0000\u0185r\u0187s\u0189t\u018b"+ - "\u0000\u018d\u0000\u018fu\u0191v\u0193w\u0195\u0000\u0197\u0000\u0199"+ - "\u0000\u019b\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ - "\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SS"+ - "ss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ - "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ - "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ - "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ - "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ - "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ - "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ - "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05e8\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ - "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ - "\u0000\u0000=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001"+ - "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ - "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ 
- "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ - "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ - "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ - "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ - "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ - "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ - "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ - "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ - "\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ - "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ - "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ - "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ - "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ - "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ - "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ - "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ - "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ - "\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001"+ - "\u00ab\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ - "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001"+ - "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ - "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002"+ - "\u00bd\u0001\u0000\u0000\u0000\u0002\u00bf\u0001\u0000\u0000\u0000\u0003"+ - "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ - "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ - "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003"+ - "\u00cd\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ - "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ - "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003"+ - "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ - "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ - "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e5\u0001\u0000\u0000\u0000\u0004"+ - "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004"+ - "\u00ef\u0001\u0000\u0000\u0000\u0004\u00f1\u0001\u0000\u0000\u0000\u0005"+ - "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ - "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ - "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ - "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ - "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0005"+ - "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ - "\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006"+ - "\u010f\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ - "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ - "\u0119\u0001\u0000\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0007"+ - "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ - 
"\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ - "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ - "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ - "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007"+ - "\u0131\u0001\u0000\u0000\u0000\u0007\u0133\u0001\u0000\u0000\u0000\b\u0135"+ - "\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001"+ - "\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000"+ - "\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000"+ - "\u0000\b\u0143\u0001\u0000\u0000\u0000\b\u0145\u0001\u0000\u0000\u0000"+ - "\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b"+ - "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\t\u014f\u0001"+ - "\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000"+ - "\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000"+ - "\u0000\n\u0159\u0001\u0000\u0000\u0000\n\u015b\u0001\u0000\u0000\u0000"+ - "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ - "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ - "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ - "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ - "\u000b\u016d\u0001\u0000\u0000\u0000\u000b\u016f\u0001\u0000\u0000\u0000"+ - "\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ - "\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001"+ - "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\f\u017d\u0001\u0000"+ - "\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000"+ - "\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000"+ - "\r\u0187\u0001\u0000\u0000\u0000\r\u0189\u0001\u0000\u0000\u0000\u000e"+ - "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ - "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ - "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ - "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000e"+ - "\u019b\u0001\u0000\u0000\u0000\u000f\u019d\u0001\u0000\u0000\u0000\u0011"+ - "\u01a7\u0001\u0000\u0000\u0000\u0013\u01ae\u0001\u0000\u0000\u0000\u0015"+ - "\u01b7\u0001\u0000\u0000\u0000\u0017\u01be\u0001\u0000\u0000\u0000\u0019"+ - "\u01c8\u0001\u0000\u0000\u0000\u001b\u01cf\u0001\u0000\u0000\u0000\u001d"+ - "\u01d6\u0001\u0000\u0000\u0000\u001f\u01dd\u0001\u0000\u0000\u0000!\u01e5"+ - "\u0001\u0000\u0000\u0000#\u01f1\u0001\u0000\u0000\u0000%\u01fa\u0001\u0000"+ - "\u0000\u0000\'\u0200\u0001\u0000\u0000\u0000)\u0207\u0001\u0000\u0000"+ - "\u0000+\u020e\u0001\u0000\u0000\u0000-\u0216\u0001\u0000\u0000\u0000/"+ - "\u021e\u0001\u0000\u0000\u00001\u022d\u0001\u0000\u0000\u00003\u0237\u0001"+ - "\u0000\u0000\u00005\u0243\u0001\u0000\u0000\u00007\u0249\u0001\u0000\u0000"+ - "\u00009\u025a\u0001\u0000\u0000\u0000;\u026a\u0001\u0000\u0000\u0000="+ - "\u0270\u0001\u0000\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0276\u0001"+ - "\u0000\u0000\u0000C\u0278\u0001\u0000\u0000\u0000E\u027a\u0001\u0000\u0000"+ - "\u0000G\u027d\u0001\u0000\u0000\u0000I\u027f\u0001\u0000\u0000\u0000K"+ - "\u0288\u0001\u0000\u0000\u0000M\u028a\u0001\u0000\u0000\u0000O\u028f\u0001"+ - "\u0000\u0000\u0000Q\u0291\u0001\u0000\u0000\u0000S\u0296\u0001\u0000\u0000"+ - 
"\u0000U\u02b5\u0001\u0000\u0000\u0000W\u02b8\u0001\u0000\u0000\u0000Y"+ - "\u02e6\u0001\u0000\u0000\u0000[\u02e8\u0001\u0000\u0000\u0000]\u02eb\u0001"+ - "\u0000\u0000\u0000_\u02ef\u0001\u0000\u0000\u0000a\u02f3\u0001\u0000\u0000"+ - "\u0000c\u02f5\u0001\u0000\u0000\u0000e\u02f8\u0001\u0000\u0000\u0000g"+ - "\u02fa\u0001\u0000\u0000\u0000i\u02ff\u0001\u0000\u0000\u0000k\u0301\u0001"+ - "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030d\u0001\u0000\u0000"+ - "\u0000q\u0310\u0001\u0000\u0000\u0000s\u0313\u0001\u0000\u0000\u0000u"+ - "\u0318\u0001\u0000\u0000\u0000w\u031d\u0001\u0000\u0000\u0000y\u031f\u0001"+ - "\u0000\u0000\u0000{\u0323\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ - "\u0000\u007f\u032e\u0001\u0000\u0000\u0000\u0081\u0331\u0001\u0000\u0000"+ - "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0339\u0001\u0000\u0000"+ - "\u0000\u0087\u033b\u0001\u0000\u0000\u0000\u0089\u0340\u0001\u0000\u0000"+ - "\u0000\u008b\u0343\u0001\u0000\u0000\u0000\u008d\u0346\u0001\u0000\u0000"+ - "\u0000\u008f\u0349\u0001\u0000\u0000\u0000\u0091\u034b\u0001\u0000\u0000"+ - "\u0000\u0093\u034e\u0001\u0000\u0000\u0000\u0095\u0350\u0001\u0000\u0000"+ - "\u0000\u0097\u0353\u0001\u0000\u0000\u0000\u0099\u0355\u0001\u0000\u0000"+ - "\u0000\u009b\u0357\u0001\u0000\u0000\u0000\u009d\u0359\u0001\u0000\u0000"+ - "\u0000\u009f\u035b\u0001\u0000\u0000\u0000\u00a1\u035d\u0001\u0000\u0000"+ - "\u0000\u00a3\u0362\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000"+ - "\u0000\u00a7\u0379\u0001\u0000\u0000\u0000\u00a9\u037e\u0001\u0000\u0000"+ - "\u0000\u00ab\u0393\u0001\u0000\u0000\u0000\u00ad\u0395\u0001\u0000\u0000"+ - "\u0000\u00af\u039d\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ - "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ - "\u0000\u00b7\u03ab\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ - "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ - "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ - "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ - "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ - "\u0000\u00cb\u03d6\u0001\u0000\u0000\u0000\u00cd\u03da\u0001\u0000\u0000"+ - "\u0000\u00cf\u03e6\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ - "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ - "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ - "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ - "\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ - "\u0000\u00e3\u040e\u0001\u0000\u0000\u0000\u00e5\u0413\u0001\u0000\u0000"+ - "\u0000\u00e7\u041c\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000"+ - "\u0000\u00eb\u0435\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000"+ - "\u0000\u00ef\u043d\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000"+ - "\u0000\u00f3\u0445\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000"+ - "\u0000\u00f7\u044e\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000"+ - "\u0000\u00fb\u0456\u0001\u0000\u0000\u0000\u00fd\u045b\u0001\u0000\u0000"+ - "\u0000\u00ff\u0460\u0001\u0000\u0000\u0000\u0101\u0463\u0001\u0000\u0000"+ - "\u0000\u0103\u0467\u0001\u0000\u0000\u0000\u0105\u046b\u0001\u0000\u0000"+ - "\u0000\u0107\u046f\u0001\u0000\u0000\u0000\u0109\u0473\u0001\u0000\u0000"+ - "\u0000\u010b\u0478\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ - 
"\u0000\u010f\u0482\u0001\u0000\u0000\u0000\u0111\u0489\u0001\u0000\u0000"+ - "\u0000\u0113\u0492\u0001\u0000\u0000\u0000\u0115\u0499\u0001\u0000\u0000"+ - "\u0000\u0117\u049d\u0001\u0000\u0000\u0000\u0119\u04a1\u0001\u0000\u0000"+ - "\u0000\u011b\u04a5\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000"+ - "\u0000\u011f\u04af\u0001\u0000\u0000\u0000\u0121\u04b3\u0001\u0000\u0000"+ - "\u0000\u0123\u04b7\u0001\u0000\u0000\u0000\u0125\u04bb\u0001\u0000\u0000"+ - "\u0000\u0127\u04bf\u0001\u0000\u0000\u0000\u0129\u04c3\u0001\u0000\u0000"+ - "\u0000\u012b\u04c7\u0001\u0000\u0000\u0000\u012d\u04cc\u0001\u0000\u0000"+ - "\u0000\u012f\u04d1\u0001\u0000\u0000\u0000\u0131\u04d5\u0001\u0000\u0000"+ - "\u0000\u0133\u04d9\u0001\u0000\u0000\u0000\u0135\u04dd\u0001\u0000\u0000"+ - "\u0000\u0137\u04e2\u0001\u0000\u0000\u0000\u0139\u04e6\u0001\u0000\u0000"+ - "\u0000\u013b\u04eb\u0001\u0000\u0000\u0000\u013d\u04f0\u0001\u0000\u0000"+ - "\u0000\u013f\u04f4\u0001\u0000\u0000\u0000\u0141\u04f8\u0001\u0000\u0000"+ - "\u0000\u0143\u04fc\u0001\u0000\u0000\u0000\u0145\u0500\u0001\u0000\u0000"+ - "\u0000\u0147\u0504\u0001\u0000\u0000\u0000\u0149\u0509\u0001\u0000\u0000"+ - "\u0000\u014b\u050e\u0001\u0000\u0000\u0000\u014d\u0512\u0001\u0000\u0000"+ - "\u0000\u014f\u0516\u0001\u0000\u0000\u0000\u0151\u051a\u0001\u0000\u0000"+ - "\u0000\u0153\u051f\u0001\u0000\u0000\u0000\u0155\u0528\u0001\u0000\u0000"+ - "\u0000\u0157\u052c\u0001\u0000\u0000\u0000\u0159\u0530\u0001\u0000\u0000"+ - "\u0000\u015b\u0534\u0001\u0000\u0000\u0000\u015d\u0538\u0001\u0000\u0000"+ - "\u0000\u015f\u053d\u0001\u0000\u0000\u0000\u0161\u0541\u0001\u0000\u0000"+ - "\u0000\u0163\u0545\u0001\u0000\u0000\u0000\u0165\u0549\u0001\u0000\u0000"+ - "\u0000\u0167\u054e\u0001\u0000\u0000\u0000\u0169\u0552\u0001\u0000\u0000"+ - "\u0000\u016b\u0556\u0001\u0000\u0000\u0000\u016d\u055a\u0001\u0000\u0000"+ - "\u0000\u016f\u055e\u0001\u0000\u0000\u0000\u0171\u0562\u0001\u0000\u0000"+ - "\u0000\u0173\u0568\u0001\u0000\u0000\u0000\u0175\u056c\u0001\u0000\u0000"+ - "\u0000\u0177\u0570\u0001\u0000\u0000\u0000\u0179\u0574\u0001\u0000\u0000"+ - "\u0000\u017b\u0578\u0001\u0000\u0000\u0000\u017d\u057c\u0001\u0000\u0000"+ - "\u0000\u017f\u0580\u0001\u0000\u0000\u0000\u0181\u0585\u0001\u0000\u0000"+ - "\u0000\u0183\u058b\u0001\u0000\u0000\u0000\u0185\u0591\u0001\u0000\u0000"+ - "\u0000\u0187\u0595\u0001\u0000\u0000\u0000\u0189\u0599\u0001\u0000\u0000"+ - "\u0000\u018b\u059d\u0001\u0000\u0000\u0000\u018d\u05a3\u0001\u0000\u0000"+ - "\u0000\u018f\u05a9\u0001\u0000\u0000\u0000\u0191\u05ad\u0001\u0000\u0000"+ - "\u0000\u0193\u05b1\u0001\u0000\u0000\u0000\u0195\u05b5\u0001\u0000\u0000"+ - "\u0000\u0197\u05bb\u0001\u0000\u0000\u0000\u0199\u05c1\u0001\u0000\u0000"+ - "\u0000\u019b\u05c7\u0001\u0000\u0000\u0000\u019d\u019e\u0007\u0000\u0000"+ - "\u0000\u019e\u019f\u0007\u0001\u0000\u0000\u019f\u01a0\u0007\u0002\u0000"+ - "\u0000\u01a0\u01a1\u0007\u0002\u0000\u0000\u01a1\u01a2\u0007\u0003\u0000"+ - "\u0000\u01a2\u01a3\u0007\u0004\u0000\u0000\u01a3\u01a4\u0007\u0005\u0000"+ - "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6\u0006\u0000\u0000"+ - "\u0000\u01a6\u0010\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0000\u0000"+ - "\u0000\u01a8\u01a9\u0007\u0006\u0000\u0000\u01a9\u01aa\u0007\u0007\u0000"+ - "\u0000\u01aa\u01ab\u0007\b\u0000\u0000\u01ab\u01ac\u0001\u0000\u0000\u0000"+ - "\u01ac\u01ad\u0006\u0001\u0001\u0000\u01ad\u0012\u0001\u0000\u0000\u0000"+ - "\u01ae\u01af\u0007\u0003\u0000\u0000\u01af\u01b0\u0007\t\u0000\u0000\u01b0"+ - 
"\u01b1\u0007\u0006\u0000\u0000\u01b1\u01b2\u0007\u0001\u0000\u0000\u01b2"+ - "\u01b3\u0007\u0004\u0000\u0000\u01b3\u01b4\u0007\n\u0000\u0000\u01b4\u01b5"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0006\u0002\u0002\u0000\u01b6\u0014"+ - "\u0001\u0000\u0000\u0000\u01b7\u01b8\u0007\u0003\u0000\u0000\u01b8\u01b9"+ - "\u0007\u000b\u0000\u0000\u01b9\u01ba\u0007\f\u0000\u0000\u01ba\u01bb\u0007"+ - "\r\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\u0003"+ - "\u0000\u0000\u01bd\u0016\u0001\u0000\u0000\u0000\u01be\u01bf\u0007\u0003"+ - "\u0000\u0000\u01bf\u01c0\u0007\u000e\u0000\u0000\u01c0\u01c1\u0007\b\u0000"+ - "\u0000\u01c1\u01c2\u0007\r\u0000\u0000\u01c2\u01c3\u0007\f\u0000\u0000"+ - "\u01c3\u01c4\u0007\u0001\u0000\u0000\u01c4\u01c5\u0007\t\u0000\u0000\u01c5"+ - "\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u0004\u0003\u0000\u01c7"+ - "\u0018\u0001\u0000\u0000\u0000\u01c8\u01c9\u0007\u000f\u0000\u0000\u01c9"+ - "\u01ca\u0007\u0006\u0000\u0000\u01ca\u01cb\u0007\u0007\u0000\u0000\u01cb"+ - "\u01cc\u0007\u0010\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0006\u0005\u0004\u0000\u01ce\u001a\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0007\u0011\u0000\u0000\u01d0\u01d1\u0007\u0006\u0000\u0000\u01d1"+ - "\u01d2\u0007\u0007\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0006\u0000\u0000\u01d5"+ - "\u001c\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0012\u0000\u0000\u01d7"+ - "\u01d8\u0007\u0003\u0000\u0000\u01d8\u01d9\u0007\u0003\u0000\u0000\u01d9"+ - "\u01da\u0007\b\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0006\u0007\u0001\u0000\u01dc\u001e\u0001\u0000\u0000\u0000\u01dd\u01de"+ - "\u0007\r\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ - "\u0010\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2\u0007"+ - "\u0005\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ - "\b\u0000\u0000\u01e4 \u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0010"+ - "\u0000\u0000\u01e6\u01e7\u0007\u000b\u0000\u0000\u01e7\u01e8\u0005_\u0000"+ - "\u0000\u01e8\u01e9\u0007\u0003\u0000\u0000\u01e9\u01ea\u0007\u000e\u0000"+ - "\u0000\u01ea\u01eb\u0007\b\u0000\u0000\u01eb\u01ec\u0007\f\u0000\u0000"+ - "\u01ec\u01ed\u0007\t\u0000\u0000\u01ed\u01ee\u0007\u0000\u0000\u0000\u01ee"+ - "\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\t\u0005\u0000\u01f0\""+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0007\u0006\u0000\u0000\u01f2\u01f3"+ - "\u0007\u0003\u0000\u0000\u01f3\u01f4\u0007\t\u0000\u0000\u01f4\u01f5\u0007"+ - "\f\u0000\u0000\u01f5\u01f6\u0007\u0010\u0000\u0000\u01f6\u01f7\u0007\u0003"+ - "\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\n\u0006"+ - "\u0000\u01f9$\u0001\u0000\u0000\u0000\u01fa\u01fb\u0007\u0006\u0000\u0000"+ - "\u01fb\u01fc\u0007\u0007\u0000\u0000\u01fc\u01fd\u0007\u0013\u0000\u0000"+ - "\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u01ff\u0006\u000b\u0000\u0000"+ - "\u01ff&\u0001\u0000\u0000\u0000\u0200\u0201\u0007\u0002\u0000\u0000\u0201"+ - "\u0202\u0007\n\u0000\u0000\u0202\u0203\u0007\u0007\u0000\u0000\u0203\u0204"+ - "\u0007\u0013\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ - "\u0006\f\u0007\u0000\u0206(\u0001\u0000\u0000\u0000\u0207\u0208\u0007"+ - "\u0002\u0000\u0000\u0208\u0209\u0007\u0007\u0000\u0000\u0209\u020a\u0007"+ - "\u0006\u0000\u0000\u020a\u020b\u0007\u0005\u0000\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\r\u0000\u0000\u020d*\u0001\u0000"+ - 
"\u0000\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0007\u0005"+ - "\u0000\u0000\u0210\u0211\u0007\f\u0000\u0000\u0211\u0212\u0007\u0005\u0000"+ - "\u0000\u0212\u0213\u0007\u0002\u0000\u0000\u0213\u0214\u0001\u0000\u0000"+ - "\u0000\u0214\u0215\u0006\u000e\u0000\u0000\u0215,\u0001\u0000\u0000\u0000"+ - "\u0216\u0217\u0007\u0013\u0000\u0000\u0217\u0218\u0007\n\u0000\u0000\u0218"+ - "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0007\u0006\u0000\u0000\u021a"+ - "\u021b\u0007\u0003\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u0006\u000f\u0000\u0000\u021d.\u0001\u0000\u0000\u0000\u021e\u021f"+ - "\u0004\u0010\u0000\u0000\u021f\u0220\u0007\u0001\u0000\u0000\u0220\u0221"+ - "\u0007\t\u0000\u0000\u0221\u0222\u0007\r\u0000\u0000\u0222\u0223\u0007"+ - "\u0001\u0000\u0000\u0223\u0224\u0007\t\u0000\u0000\u0224\u0225\u0007\u0003"+ - "\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007\u0005"+ - "\u0000\u0000\u0227\u0228\u0007\f\u0000\u0000\u0228\u0229\u0007\u0005\u0000"+ - "\u0000\u0229\u022a\u0007\u0002\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ - "\u0000\u022b\u022c\u0006\u0010\u0000\u0000\u022c0\u0001\u0000\u0000\u0000"+ - "\u022d\u022e\u0004\u0011\u0001\u0000\u022e\u022f\u0007\r\u0000\u0000\u022f"+ - "\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0007\u0000\u0000\u0231"+ - "\u0232\u0007\u0012\u0000\u0000\u0232\u0233\u0007\u0014\u0000\u0000\u0233"+ - "\u0234\u0007\b\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236"+ - "\u0006\u0011\b\u0000\u02362\u0001\u0000\u0000\u0000\u0237\u0238\u0004"+ - "\u0012\u0002\u0000\u0238\u0239\u0007\u0010\u0000\u0000\u0239\u023a\u0007"+ - "\u0003\u0000\u0000\u023a\u023b\u0007\u0005\u0000\u0000\u023b\u023c\u0007"+ - "\u0006\u0000\u0000\u023c\u023d\u0007\u0001\u0000\u0000\u023d\u023e\u0007"+ - "\u0004\u0000\u0000\u023e\u023f\u0007\u0002\u0000\u0000\u023f\u0240\u0001"+ - "\u0000\u0000\u0000\u0240\u0241\u0006\u0012\t\u0000\u02414\u0001\u0000"+ - "\u0000\u0000\u0242\u0244\b\u0015\u0000\u0000\u0243\u0242\u0001\u0000\u0000"+ - "\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000"+ - "\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0006\u0013\u0000\u0000\u02486\u0001\u0000\u0000\u0000"+ - "\u0249\u024a\u0005/\u0000\u0000\u024a\u024b\u0005/\u0000\u0000\u024b\u024f"+ - "\u0001\u0000\u0000\u0000\u024c\u024e\b\u0016\u0000\u0000\u024d\u024c\u0001"+ - "\u0000\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001"+ - "\u0000\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0253\u0001"+ - "\u0000\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0254\u0005"+ - "\r\u0000\u0000\u0253\u0252\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0257\u0005\n\u0000"+ - "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000"+ - "\u0000\u0257\u0258\u0001\u0000\u0000\u0000\u0258\u0259\u0006\u0014\n\u0000"+ - "\u02598\u0001\u0000\u0000\u0000\u025a\u025b\u0005/\u0000\u0000\u025b\u025c"+ - "\u0005*\u0000\u0000\u025c\u0261\u0001\u0000\u0000\u0000\u025d\u0260\u0003"+ - "9\u0015\u0000\u025e\u0260\t\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ - "\u0000\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000"+ - "\u0000\u0000\u0261\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ - "\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0005*\u0000\u0000\u0265\u0266\u0005/\u0000\u0000"+ - 
"\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0015\n\u0000\u0268"+ - ":\u0001\u0000\u0000\u0000\u0269\u026b\u0007\u0017\u0000\u0000\u026a\u0269"+ - "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e"+ - "\u0001\u0000\u0000\u0000\u026e\u026f\u0006\u0016\n\u0000\u026f<\u0001"+ - "\u0000\u0000\u0000\u0270\u0271\u0005:\u0000\u0000\u0271>\u0001\u0000\u0000"+ - "\u0000\u0272\u0273\u0005|\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ - "\u0274\u0275\u0006\u0018\u000b\u0000\u0275@\u0001\u0000\u0000\u0000\u0276"+ - "\u0277\u0007\u0018\u0000\u0000\u0277B\u0001\u0000\u0000\u0000\u0278\u0279"+ - "\u0007\u0019\u0000\u0000\u0279D\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ - "\\\u0000\u0000\u027b\u027c\u0007\u001a\u0000\u0000\u027cF\u0001\u0000"+ - "\u0000\u0000\u027d\u027e\b\u001b\u0000\u0000\u027eH\u0001\u0000\u0000"+ - "\u0000\u027f\u0281\u0007\u0003\u0000\u0000\u0280\u0282\u0007\u001c\u0000"+ - "\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000\u0000"+ - "\u0000\u0282\u0284\u0001\u0000\u0000\u0000\u0283\u0285\u0003A\u0019\u0000"+ - "\u0284\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000"+ - "\u0286\u0284\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000\u0000\u0000"+ - "\u0287J\u0001\u0000\u0000\u0000\u0288\u0289\u0005@\u0000\u0000\u0289L"+ - "\u0001\u0000\u0000\u0000\u028a\u028b\u0005`\u0000\u0000\u028bN\u0001\u0000"+ - "\u0000\u0000\u028c\u0290\b\u001d\u0000\u0000\u028d\u028e\u0005`\u0000"+ - "\u0000\u028e\u0290\u0005`\u0000\u0000\u028f\u028c\u0001\u0000\u0000\u0000"+ - "\u028f\u028d\u0001\u0000\u0000\u0000\u0290P\u0001\u0000\u0000\u0000\u0291"+ - "\u0292\u0005_\u0000\u0000\u0292R\u0001\u0000\u0000\u0000\u0293\u0297\u0003"+ - "C\u001a\u0000\u0294\u0297\u0003A\u0019\u0000\u0295\u0297\u0003Q!\u0000"+ - "\u0296\u0293\u0001\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000"+ - "\u0296\u0295\u0001\u0000\u0000\u0000\u0297T\u0001\u0000\u0000\u0000\u0298"+ - "\u029d\u0005\"\u0000\u0000\u0299\u029c\u0003E\u001b\u0000\u029a\u029c"+ - "\u0003G\u001c\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b\u029a\u0001"+ - "\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001"+ - "\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a0\u0001"+ - "\u0000\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02b6\u0005"+ - "\"\u0000\u0000\u02a1\u02a2\u0005\"\u0000\u0000\u02a2\u02a3\u0005\"\u0000"+ - "\u0000\u02a3\u02a4\u0005\"\u0000\u0000\u02a4\u02a8\u0001\u0000\u0000\u0000"+ - "\u02a5\u02a7\b\u0016\u0000\u0000\u02a6\u02a5\u0001\u0000\u0000\u0000\u02a7"+ - "\u02aa\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000\u0000\u02a8"+ - "\u02a6\u0001\u0000\u0000\u0000\u02a9\u02ab\u0001\u0000\u0000\u0000\u02aa"+ - "\u02a8\u0001\u0000\u0000\u0000\u02ab\u02ac\u0005\"\u0000\u0000\u02ac\u02ad"+ - "\u0005\"\u0000\u0000\u02ad\u02ae\u0005\"\u0000\u0000\u02ae\u02b0\u0001"+ - "\u0000\u0000\u0000\u02af\u02b1\u0005\"\u0000\u0000\u02b0\u02af\u0001\u0000"+ - "\u0000\u0000\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b3\u0001\u0000"+ - "\u0000\u0000\u02b2\u02b4\u0005\"\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000"+ - "\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b5\u0298\u0001\u0000\u0000\u0000\u02b5\u02a1\u0001\u0000\u0000"+ - "\u0000\u02b6V\u0001\u0000\u0000\u0000\u02b7\u02b9\u0003A\u0019\u0000\u02b8"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ - 
"\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ - "X\u0001\u0000\u0000\u0000\u02bc\u02be\u0003A\u0019\u0000\u02bd\u02bc\u0001"+ - "\u0000\u0000\u0000\u02be\u02bf\u0001\u0000\u0000\u0000\u02bf\u02bd\u0001"+ - "\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001"+ - "\u0000\u0000\u0000\u02c1\u02c5\u0003i-\u0000\u02c2\u02c4\u0003A\u0019"+ - "\u0000\u02c3\u02c2\u0001\u0000\u0000\u0000\u02c4\u02c7\u0001\u0000\u0000"+ - "\u0000\u02c5\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000"+ - "\u0000\u02c6\u02e7\u0001\u0000\u0000\u0000\u02c7\u02c5\u0001\u0000\u0000"+ - "\u0000\u02c8\u02ca\u0003i-\u0000\u02c9\u02cb\u0003A\u0019\u0000\u02ca"+ - "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ - "\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd"+ - "\u02e7\u0001\u0000\u0000\u0000\u02ce\u02d0\u0003A\u0019\u0000\u02cf\u02ce"+ - "\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000\u02d1\u02cf"+ - "\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02da"+ - "\u0001\u0000\u0000\u0000\u02d3\u02d7\u0003i-\u0000\u02d4\u02d6\u0003A"+ - "\u0019\u0000\u02d5\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d9\u0001\u0000"+ - "\u0000\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ - "\u0000\u0000\u02d8\u02db\u0001\u0000\u0000\u0000\u02d9\u02d7\u0001\u0000"+ - "\u0000\u0000\u02da\u02d3\u0001\u0000\u0000\u0000\u02da\u02db\u0001\u0000"+ - "\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000\u02dc\u02dd\u0003I\u001d"+ - "\u0000\u02dd\u02e7\u0001\u0000\u0000\u0000\u02de\u02e0\u0003i-\u0000\u02df"+ - "\u02e1\u0003A\u0019\u0000\u02e0\u02df\u0001\u0000\u0000\u0000\u02e1\u02e2"+ - "\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0001\u0000\u0000\u0000\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e5"+ - "\u0003I\u001d\u0000\u02e5\u02e7\u0001\u0000\u0000\u0000\u02e6\u02bd\u0001"+ - "\u0000\u0000\u0000\u02e6\u02c8\u0001\u0000\u0000\u0000\u02e6\u02cf\u0001"+ - "\u0000\u0000\u0000\u02e6\u02de\u0001\u0000\u0000\u0000\u02e7Z\u0001\u0000"+ - "\u0000\u0000\u02e8\u02e9\u0007\u001e\u0000\u0000\u02e9\u02ea\u0007\u001f"+ - "\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000"+ - "\u0000\u02ec\u02ed\u0007\t\u0000\u0000\u02ed\u02ee\u0007\u0000\u0000\u0000"+ - "\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0007\f\u0000\u0000\u02f0"+ - "\u02f1\u0007\u0002\u0000\u0000\u02f1\u02f2\u0007\u0004\u0000\u0000\u02f2"+ - "`\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4b\u0001"+ - "\u0000\u0000\u0000\u02f5\u02f6\u0005:\u0000\u0000\u02f6\u02f7\u0005:\u0000"+ - "\u0000\u02f7d\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005,\u0000\u0000\u02f9"+ - "f\u0001\u0000\u0000\u0000\u02fa\u02fb\u0007\u0000\u0000\u0000\u02fb\u02fc"+ - "\u0007\u0003\u0000\u0000\u02fc\u02fd\u0007\u0002\u0000\u0000\u02fd\u02fe"+ - "\u0007\u0004\u0000\u0000\u02feh\u0001\u0000\u0000\u0000\u02ff\u0300\u0005"+ - ".\u0000\u0000\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000"+ - "\u0000\u0302\u0303\u0007\f\u0000\u0000\u0303\u0304\u0007\r\u0000\u0000"+ - "\u0304\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0003\u0000\u0000"+ - "\u0306l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u000f\u0000\u0000\u0308"+ - "\u0309\u0007\u0001\u0000\u0000\u0309\u030a\u0007\u0006\u0000\u0000\u030a"+ - "\u030b\u0007\u0002\u0000\u0000\u030b\u030c\u0007\u0005\u0000\u0000\u030c"+ - "n\u0001\u0000\u0000\u0000\u030d\u030e\u0007\u0001\u0000\u0000\u030e\u030f"+ - 
"\u0007\t\u0000\u0000\u030fp\u0001\u0000\u0000\u0000\u0310\u0311\u0007"+ - "\u0001\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312r\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0007\r\u0000\u0000\u0314\u0315\u0007\f\u0000"+ - "\u0000\u0315\u0316\u0007\u0002\u0000\u0000\u0316\u0317\u0007\u0005\u0000"+ - "\u0000\u0317t\u0001\u0000\u0000\u0000\u0318\u0319\u0007\r\u0000\u0000"+ - "\u0319\u031a\u0007\u0001\u0000\u0000\u031a\u031b\u0007\u0012\u0000\u0000"+ - "\u031b\u031c\u0007\u0003\u0000\u0000\u031cv\u0001\u0000\u0000\u0000\u031d"+ - "\u031e\u0005(\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007"+ - "\t\u0000\u0000\u0320\u0321\u0007\u0007\u0000\u0000\u0321\u0322\u0007\u0005"+ - "\u0000\u0000\u0322z\u0001\u0000\u0000\u0000\u0323\u0324\u0007\t\u0000"+ - "\u0000\u0324\u0325\u0007\u0014\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000"+ - "\u0326\u0327\u0007\r\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328"+ - "\u0329\u0007\t\u0000\u0000\u0329\u032a\u0007\u0014\u0000\u0000\u032a\u032b"+ - "\u0007\r\u0000\u0000\u032b\u032c\u0007\r\u0000\u0000\u032c\u032d\u0007"+ - "\u0002\u0000\u0000\u032d~\u0001\u0000\u0000\u0000\u032e\u032f\u0007\u0007"+ - "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0080\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0005?\u0000\u0000\u0332\u0082\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0007\u0006\u0000\u0000\u0334\u0335\u0007\r\u0000\u0000"+ - "\u0335\u0336\u0007\u0001\u0000\u0000\u0336\u0337\u0007\u0012\u0000\u0000"+ - "\u0337\u0338\u0007\u0003\u0000\u0000\u0338\u0084\u0001\u0000\u0000\u0000"+ - "\u0339\u033a\u0005)\u0000\u0000\u033a\u0086\u0001\u0000\u0000\u0000\u033b"+ - "\u033c\u0007\u0005\u0000\u0000\u033c\u033d\u0007\u0006\u0000\u0000\u033d"+ - "\u033e\u0007\u0014\u0000\u0000\u033e\u033f\u0007\u0003\u0000\u0000\u033f"+ - "\u0088\u0001\u0000\u0000\u0000\u0340\u0341\u0005=\u0000\u0000\u0341\u0342"+ - "\u0005=\u0000\u0000\u0342\u008a\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ - "=\u0000\u0000\u0344\u0345\u0005~\u0000\u0000\u0345\u008c\u0001\u0000\u0000"+ - "\u0000\u0346\u0347\u0005!\u0000\u0000\u0347\u0348\u0005=\u0000\u0000\u0348"+ - "\u008e\u0001\u0000\u0000\u0000\u0349\u034a\u0005<\u0000\u0000\u034a\u0090"+ - "\u0001\u0000\u0000\u0000\u034b\u034c\u0005<\u0000\u0000\u034c\u034d\u0005"+ - "=\u0000\u0000\u034d\u0092\u0001\u0000\u0000\u0000\u034e\u034f\u0005>\u0000"+ - "\u0000\u034f\u0094\u0001\u0000\u0000\u0000\u0350\u0351\u0005>\u0000\u0000"+ - "\u0351\u0352\u0005=\u0000\u0000\u0352\u0096\u0001\u0000\u0000\u0000\u0353"+ - "\u0354\u0005+\u0000\u0000\u0354\u0098\u0001\u0000\u0000\u0000\u0355\u0356"+ - "\u0005-\u0000\u0000\u0356\u009a\u0001\u0000\u0000\u0000\u0357\u0358\u0005"+ - "*\u0000\u0000\u0358\u009c\u0001\u0000\u0000\u0000\u0359\u035a\u0005/\u0000"+ - "\u0000\u035a\u009e\u0001\u0000\u0000\u0000\u035b\u035c\u0005%\u0000\u0000"+ - "\u035c\u00a0\u0001\u0000\u0000\u0000\u035d\u035e\u0004I\u0003\u0000\u035e"+ - "\u035f\u0003=\u0017\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u0361"+ - "\u0006I\f\u0000\u0361\u00a2\u0001\u0000\u0000\u0000\u0362\u0363\u0003"+ - "-\u000f\u0000\u0363\u0364\u0001\u0000\u0000\u0000\u0364\u0365\u0006J\r"+ - "\u0000\u0365\u00a4\u0001\u0000\u0000\u0000\u0366\u0369\u0003\u00819\u0000"+ - "\u0367\u036a\u0003C\u001a\u0000\u0368\u036a\u0003Q!\u0000\u0369\u0367"+ - "\u0001\u0000\u0000\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036e"+ - "\u0001\u0000\u0000\u0000\u036b\u036d\u0003S\"\u0000\u036c\u036b\u0001"+ - "\u0000\u0000\u0000\u036d\u0370\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ - 
"\u0000\u0000\u0000\u036e\u036f\u0001\u0000\u0000\u0000\u036f\u0378\u0001"+ - "\u0000\u0000\u0000\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u0373\u0003"+ - "\u00819\u0000\u0372\u0374\u0003A\u0019\u0000\u0373\u0372\u0001\u0000\u0000"+ - "\u0000\u0374\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000"+ - "\u0000\u0375\u0376\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000"+ - "\u0000\u0377\u0366\u0001\u0000\u0000\u0000\u0377\u0371\u0001\u0000\u0000"+ - "\u0000\u0378\u00a6\u0001\u0000\u0000\u0000\u0379\u037a\u0005[\u0000\u0000"+ - "\u037a\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006L\u0000\u0000\u037c"+ - "\u037d\u0006L\u0000\u0000\u037d\u00a8\u0001\u0000\u0000\u0000\u037e\u037f"+ - "\u0005]\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380\u0381\u0006"+ - "M\u000b\u0000\u0381\u0382\u0006M\u000b\u0000\u0382\u00aa\u0001\u0000\u0000"+ - "\u0000\u0383\u0387\u0003C\u001a\u0000\u0384\u0386\u0003S\"\u0000\u0385"+ - "\u0384\u0001\u0000\u0000\u0000\u0386\u0389\u0001\u0000\u0000\u0000\u0387"+ - "\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388"+ - "\u0394\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000\u0000\u038a"+ - "\u038d\u0003Q!\u0000\u038b\u038d\u0003K\u001e\u0000\u038c\u038a\u0001"+ - "\u0000\u0000\u0000\u038c\u038b\u0001\u0000\u0000\u0000\u038d\u038f\u0001"+ - "\u0000\u0000\u0000\u038e\u0390\u0003S\"\u0000\u038f\u038e\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000"+ - "\u0000\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392\u0394\u0001\u0000"+ - "\u0000\u0000\u0393\u0383\u0001\u0000\u0000\u0000\u0393\u038c\u0001\u0000"+ - "\u0000\u0000\u0394\u00ac\u0001\u0000\u0000\u0000\u0395\u0397\u0003M\u001f"+ - "\u0000\u0396\u0398\u0003O \u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398"+ - "\u0399\u0001\u0000\u0000\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399"+ - "\u039a\u0001\u0000\u0000\u0000\u039a\u039b\u0001\u0000\u0000\u0000\u039b"+ - "\u039c\u0003M\u001f\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e"+ - "\u0003\u00adO\u0000\u039e\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ - "7\u0014\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n"+ - "\u0000\u03a2\u00b2\u0001\u0000\u0000\u0000\u03a3\u03a4\u00039\u0015\u0000"+ - "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006R\n\u0000\u03a6"+ - "\u00b4\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003;\u0016\u0000\u03a8\u03a9"+ - "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006S\n\u0000\u03aa\u00b6\u0001"+ - "\u0000\u0000\u0000\u03ab\u03ac\u0003\u00a7L\u0000\u03ac\u03ad\u0001\u0000"+ - "\u0000\u0000\u03ad\u03ae\u0006T\u000e\u0000\u03ae\u03af\u0006T\u000f\u0000"+ - "\u03af\u00b8\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003?\u0018\u0000\u03b1"+ - "\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006U\u0010\u0000\u03b3\u03b4"+ - "\u0006U\u000b\u0000\u03b4\u00ba\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003"+ - ";\u0016\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006V\n"+ - "\u0000\u03b8\u00bc\u0001\u0000\u0000\u0000\u03b9\u03ba\u00037\u0014\u0000"+ - "\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc\u0006W\n\u0000\u03bc"+ - "\u00be\u0001\u0000\u0000\u0000\u03bd\u03be\u00039\u0015\u0000\u03be\u03bf"+ - "\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006X\n\u0000\u03c0\u00c0\u0001"+ - "\u0000\u0000\u0000\u03c1\u03c2\u0003?\u0018\u0000\u03c2\u03c3\u0001\u0000"+ - "\u0000\u0000\u03c3\u03c4\u0006Y\u0010\u0000\u03c4\u03c5\u0006Y\u000b\u0000"+ - "\u03c5\u00c2\u0001\u0000\u0000\u0000\u03c6\u03c7\u0003\u00a7L\u0000\u03c7"+ - 
"\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9\u0006Z\u000e\u0000\u03c9\u00c4"+ - "\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003\u00a9M\u0000\u03cb\u03cc\u0001"+ - "\u0000\u0000\u0000\u03cc\u03cd\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000"+ - "\u0000\u0000\u03ce\u03cf\u0003=\u0017\u0000\u03cf\u03d0\u0001\u0000\u0000"+ - "\u0000\u03d0\u03d1\u0006\\\f\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d3\u0003e+\u0000\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5"+ - "\u0006]\u0012\u0000\u03d5\u00ca\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003"+ - "a)\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006^\u0013"+ - "\u0000\u03d9\u00cc\u0001\u0000\u0000\u0000\u03da\u03db\u0007\u0010\u0000"+ - "\u0000\u03db\u03dc\u0007\u0003\u0000\u0000\u03dc\u03dd\u0007\u0005\u0000"+ - "\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0000\u0000\u0000"+ - "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u03e1\u0007\u0005\u0000\u0000\u03e1"+ - "\u03e2\u0007\f\u0000\u0000\u03e2\u00ce\u0001\u0000\u0000\u0000\u03e3\u03e7"+ - "\b \u0000\u0000\u03e4\u03e5\u0005/\u0000\u0000\u03e5\u03e7\b!\u0000\u0000"+ - "\u03e6\u03e3\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001\u0000\u0000\u0000"+ - "\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03ea\u0003\u00cf`\u0000\u03e9"+ - "\u03e8\u0001\u0000\u0000\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb"+ - "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ - "\u00d2\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00d1a\u0000\u03ee\u03ef"+ - "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0006b\u0014\u0000\u03f0\u00d4\u0001"+ - "\u0000\u0000\u0000\u03f1\u03f2\u0003U#\u0000\u03f2\u03f3\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0006c\u0015\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000"+ - "\u03f5\u03f6\u00037\u0014\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ - "\u03f8\u0006d\n\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u00039\u0015\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ - "e\n\u0000\u03fc\u00da\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003;\u0016"+ - "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006f\n\u0000"+ - "\u0400\u00dc\u0001\u0000\u0000\u0000\u0401\u0402\u0003?\u0018\u0000\u0402"+ - "\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006g\u0010\u0000\u0404\u0405"+ - "\u0006g\u000b\u0000\u0405\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ - "i-\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016"+ - "\u0000\u0409\u00e0\u0001\u0000\u0000\u0000\u040a\u040b\u0003e+\u0000\u040b"+ - "\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006i\u0012\u0000\u040d\u00e2"+ - "\u0001\u0000\u0000\u0000\u040e\u040f\u0004j\u0004\u0000\u040f\u0410\u0003"+ - "\u00819\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0017"+ - "\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0414\u0004k\u0005\u0000"+ - "\u0414\u0415\u0003\u00a5K\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416"+ - "\u0417\u0006k\u0018\u0000\u0417\u00e6\u0001\u0000\u0000\u0000\u0418\u041d"+ - "\u0003C\u001a\u0000\u0419\u041d\u0003A\u0019\u0000\u041a\u041d\u0003Q"+ - "!\u0000\u041b\u041d\u0003\u009bF\u0000\u041c\u0418\u0001\u0000\u0000\u0000"+ - "\u041c\u0419\u0001\u0000\u0000\u0000\u041c\u041a\u0001\u0000\u0000\u0000"+ - "\u041c\u041b\u0001\u0000\u0000\u0000\u041d\u00e8\u0001\u0000\u0000\u0000"+ - "\u041e\u0421\u0003C\u001a\u0000\u041f\u0421\u0003\u009bF\u0000\u0420\u041e"+ - "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0425"+ - "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e7l\u0000\u0423\u0422\u0001"+ - 
"\u0000\u0000\u0000\u0424\u0427\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ - "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0432\u0001"+ - "\u0000\u0000\u0000\u0427\u0425\u0001\u0000\u0000\u0000\u0428\u042b\u0003"+ - "Q!\u0000\u0429\u042b\u0003K\u001e\u0000\u042a\u0428\u0001\u0000\u0000"+ - "\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b\u042d\u0001\u0000\u0000"+ - "\u0000\u042c\u042e\u0003\u00e7l\u0000\u042d\u042c\u0001\u0000\u0000\u0000"+ - "\u042e\u042f\u0001\u0000\u0000\u0000\u042f\u042d\u0001\u0000\u0000\u0000"+ - "\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0432\u0001\u0000\u0000\u0000"+ - "\u0431\u0420\u0001\u0000\u0000\u0000\u0431\u042a\u0001\u0000\u0000\u0000"+ - "\u0432\u00ea\u0001\u0000\u0000\u0000\u0433\u0436\u0003\u00e9m\u0000\u0434"+ - "\u0436\u0003\u00adO\u0000\u0435\u0433\u0001\u0000\u0000\u0000\u0435\u0434"+ - "\u0001\u0000\u0000\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0435"+ - "\u0001\u0000\u0000\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u00ec"+ - "\u0001\u0000\u0000\u0000\u0439\u043a\u00037\u0014\u0000\u043a\u043b\u0001"+ - "\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00039\u0015\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u0003;\u0016\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006q\n\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u0003?\u0018\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "r\u0010\u0000\u0448\u0449\u0006r\u000b\u0000\u0449\u00f4\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0003a)\u0000\u044b\u044c\u0001\u0000\u0000\u0000\u044c"+ - "\u044d\u0006s\u0013\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e\u044f"+ - "\u0003e+\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006t"+ - "\u0012\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003i-\u0000"+ - "\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0016\u0000\u0455"+ - "\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0004v\u0006\u0000\u0457\u0458"+ - "\u0003\u00819\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006"+ - "v\u0017\u0000\u045a\u00fc\u0001\u0000\u0000\u0000\u045b\u045c\u0004w\u0007"+ - "\u0000\u045c\u045d\u0003\u00a5K\u0000\u045d\u045e\u0001\u0000\u0000\u0000"+ - "\u045e\u045f\u0006w\u0018\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460"+ - "\u0461\u0007\f\u0000\u0000\u0461\u0462\u0007\u0002\u0000\u0000\u0462\u0100"+ - "\u0001\u0000\u0000\u0000\u0463\u0464\u0003\u00ebn\u0000\u0464\u0465\u0001"+ - "\u0000\u0000\u0000\u0465\u0466\u0006y\u0019\u0000\u0466\u0102\u0001\u0000"+ - "\u0000\u0000\u0467\u0468\u00037\u0014\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006z\n\u0000\u046a\u0104\u0001\u0000\u0000\u0000"+ - "\u046b\u046c\u00039\u0015\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d"+ - "\u046e\u0006{\n\u0000\u046e\u0106\u0001\u0000\u0000\u0000\u046f\u0470"+ - "\u0003;\u0016\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006"+ - "|\n\u0000\u0472\u0108\u0001\u0000\u0000\u0000\u0473\u0474\u0003?\u0018"+ - "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006}\u0010\u0000"+ - "\u0476\u0477\u0006}\u000b\u0000\u0477\u010a\u0001\u0000\u0000\u0000\u0478"+ - "\u0479\u0003\u00a7L\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b"+ - "\u0006~\u000e\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001\u0000"+ - "\u0000\u0000\u047d\u047e\u0007\u0007\u0000\u0000\u047e\u047f\u0007\t\u0000"+ - 
"\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b"+ - "\u0000\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\u0007\u0013\u0000"+ - "\u0000\u0483\u0484\u0007\u0001\u0000\u0000\u0484\u0485\u0007\u0005\u0000"+ - "\u0000\u0485\u0486\u0007\n\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000"+ - "\u0487\u0488\u0006\u0080\u001b\u0000\u0488\u0110\u0001\u0000\u0000\u0000"+ - "\u0489\u048a\b\"\u0000\u0000\u048a\u0112\u0001\u0000\u0000\u0000\u048b"+ - "\u048d\u0003\u0111\u0081\u0000\u048c\u048b\u0001\u0000\u0000\u0000\u048d"+ - "\u048e\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000\u048e"+ - "\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000\u0490"+ - "\u0491\u0003=\u0017\u0000\u0491\u0493\u0001\u0000\u0000\u0000\u0492\u048c"+ - "\u0001\u0000\u0000\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0495"+ - "\u0001\u0000\u0000\u0000\u0494\u0496\u0003\u0111\u0081\u0000\u0495\u0494"+ - "\u0001\u0000\u0000\u0000\u0496\u0497\u0001\u0000\u0000\u0000\u0497\u0495"+ - "\u0001\u0000\u0000\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0114"+ - "\u0001\u0000\u0000\u0000\u0499\u049a\u0003\u0113\u0082\u0000\u049a\u049b"+ - "\u0001\u0000\u0000\u0000\u049b\u049c\u0006\u0083\u001c\u0000\u049c\u0116"+ - "\u0001\u0000\u0000\u0000\u049d\u049e\u00037\u0014\u0000\u049e\u049f\u0001"+ - "\u0000\u0000\u0000\u049f\u04a0\u0006\u0084\n\u0000\u04a0\u0118\u0001\u0000"+ - "\u0000\u0000\u04a1\u04a2\u00039\u0015\u0000\u04a2\u04a3\u0001\u0000\u0000"+ - "\u0000\u04a3\u04a4\u0006\u0085\n\u0000\u04a4\u011a\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0003;\u0016\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0006\u0086\n\u0000\u04a8\u011c\u0001\u0000\u0000\u0000\u04a9\u04aa"+ - "\u0003?\u0018\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac\u0006"+ - "\u0087\u0010\u0000\u04ac\u04ad\u0006\u0087\u000b\u0000\u04ad\u04ae\u0006"+ - "\u0087\u000b\u0000\u04ae\u011e\u0001\u0000\u0000\u0000\u04af\u04b0\u0003"+ - "a)\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0088\u0013"+ - "\u0000\u04b2\u0120\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003e+\u0000\u04b4"+ - "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u0089\u0012\u0000\u04b6"+ - "\u0122\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003i-\u0000\u04b8\u04b9\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0006\u008a\u0016\u0000\u04ba\u0124\u0001"+ - "\u0000\u0000\u0000\u04bb\u04bc\u0003\u010f\u0080\u0000\u04bc\u04bd\u0001"+ - "\u0000\u0000\u0000\u04bd\u04be\u0006\u008b\u001d\u0000\u04be\u0126\u0001"+ - "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ebn\u0000\u04c0\u04c1\u0001\u0000"+ - "\u0000\u0000\u04c1\u04c2\u0006\u008c\u0019\u0000\u04c2\u0128\u0001\u0000"+ - "\u0000\u0000\u04c3\u04c4\u0003\u00afP\u0000\u04c4\u04c5\u0001\u0000\u0000"+ - "\u0000\u04c5\u04c6\u0006\u008d\u001e\u0000\u04c6\u012a\u0001\u0000\u0000"+ - "\u0000\u04c7\u04c8\u0004\u008e\b\u0000\u04c8\u04c9\u0003\u00819\u0000"+ - "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008e\u0017\u0000"+ - "\u04cb\u012c\u0001\u0000\u0000\u0000\u04cc\u04cd\u0004\u008f\t\u0000\u04cd"+ - "\u04ce\u0003\u00a5K\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf\u04d0"+ - "\u0006\u008f\u0018\u0000\u04d0\u012e\u0001\u0000\u0000\u0000\u04d1\u04d2"+ - "\u00037\u0014\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ - "\u0090\n\u0000\u04d4\u0130\u0001\u0000\u0000\u0000\u04d5\u04d6\u00039"+ - "\u0015\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0091"+ - "\n\u0000\u04d8\u0132\u0001\u0000\u0000\u0000\u04d9\u04da\u0003;\u0016"+ - 
"\u0000\u04da\u04db\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0092\n\u0000"+ - "\u04dc\u0134\u0001\u0000\u0000\u0000\u04dd\u04de\u0003?\u0018\u0000\u04de"+ - "\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u0010\u0000\u04e0"+ - "\u04e1\u0006\u0093\u000b\u0000\u04e1\u0136\u0001\u0000\u0000\u0000\u04e2"+ - "\u04e3\u0003i-\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006"+ - "\u0094\u0016\u0000\u04e5\u0138\u0001\u0000\u0000\u0000\u04e6\u04e7\u0004"+ - "\u0095\n\u0000\u04e7\u04e8\u0003\u00819\u0000\u04e8\u04e9\u0001\u0000"+ - "\u0000\u0000\u04e9\u04ea\u0006\u0095\u0017\u0000\u04ea\u013a\u0001\u0000"+ - "\u0000\u0000\u04eb\u04ec\u0004\u0096\u000b\u0000\u04ec\u04ed\u0003\u00a5"+ - "K\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0096\u0018"+ - "\u0000\u04ef\u013c\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003\u00afP\u0000"+ - "\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u0097\u001e\u0000"+ - "\u04f3\u013e\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003\u00abN\u0000\u04f5"+ - "\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u0098\u001f\u0000\u04f7"+ - "\u0140\u0001\u0000\u0000\u0000\u04f8\u04f9\u00037\u0014\u0000\u04f9\u04fa"+ - "\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u0099\n\u0000\u04fb\u0142\u0001"+ - "\u0000\u0000\u0000\u04fc\u04fd\u00039\u0015\u0000\u04fd\u04fe\u0001\u0000"+ - "\u0000\u0000\u04fe\u04ff\u0006\u009a\n\u0000\u04ff\u0144\u0001\u0000\u0000"+ - "\u0000\u0500\u0501\u0003;\u0016\u0000\u0501\u0502\u0001\u0000\u0000\u0000"+ - "\u0502\u0503\u0006\u009b\n\u0000\u0503\u0146\u0001\u0000\u0000\u0000\u0504"+ - "\u0505\u0003?\u0018\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507"+ - "\u0006\u009c\u0010\u0000\u0507\u0508\u0006\u009c\u000b\u0000\u0508\u0148"+ - "\u0001\u0000\u0000\u0000\u0509\u050a\u0007\u0001\u0000\u0000\u050a\u050b"+ - "\u0007\t\u0000\u0000\u050b\u050c\u0007\u000f\u0000\u0000\u050c\u050d\u0007"+ - "\u0007\u0000\u0000\u050d\u014a\u0001\u0000\u0000\u0000\u050e\u050f\u0003"+ - "7\u0014\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511\u0006\u009e"+ - "\n\u0000\u0511\u014c\u0001\u0000\u0000\u0000\u0512\u0513\u00039\u0015"+ - "\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u009f\n\u0000"+ - "\u0515\u014e\u0001\u0000\u0000\u0000\u0516\u0517\u0003;\u0016\u0000\u0517"+ - "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00a0\n\u0000\u0519\u0150"+ - "\u0001\u0000\u0000\u0000\u051a\u051b\u0003\u00a9M\u0000\u051b\u051c\u0001"+ - "\u0000\u0000\u0000\u051c\u051d\u0006\u00a1\u0011\u0000\u051d\u051e\u0006"+ - "\u00a1\u000b\u0000\u051e\u0152\u0001\u0000\u0000\u0000\u051f\u0520\u0003"+ - "=\u0017\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a2"+ - "\f\u0000\u0522\u0154\u0001\u0000\u0000\u0000\u0523\u0529\u0003K\u001e"+ - "\u0000\u0524\u0529\u0003A\u0019\u0000\u0525\u0529\u0003i-\u0000\u0526"+ - "\u0529\u0003C\u001a\u0000\u0527\u0529\u0003Q!\u0000\u0528\u0523\u0001"+ - "\u0000\u0000\u0000\u0528\u0524\u0001\u0000\u0000\u0000\u0528\u0525\u0001"+ - "\u0000\u0000\u0000\u0528\u0526\u0001\u0000\u0000\u0000\u0528\u0527\u0001"+ - "\u0000\u0000\u0000\u0529\u052a\u0001\u0000\u0000\u0000\u052a\u0528\u0001"+ - "\u0000\u0000\u0000\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u0156\u0001"+ - "\u0000\u0000\u0000\u052c\u052d\u00037\u0014\u0000\u052d\u052e\u0001\u0000"+ - "\u0000\u0000\u052e\u052f\u0006\u00a4\n\u0000\u052f\u0158\u0001\u0000\u0000"+ - "\u0000\u0530\u0531\u00039\u0015\u0000\u0531\u0532\u0001\u0000\u0000\u0000"+ - "\u0532\u0533\u0006\u00a5\n\u0000\u0533\u015a\u0001\u0000\u0000\u0000\u0534"+ - 
"\u0535\u0003;\u0016\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537"+ - "\u0006\u00a6\n\u0000\u0537\u015c\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ - "?\u0018\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7"+ - "\u0010\u0000\u053b\u053c\u0006\u00a7\u000b\u0000\u053c\u015e\u0001\u0000"+ - "\u0000\u0000\u053d\u053e\u0003=\u0017\u0000\u053e\u053f\u0001\u0000\u0000"+ - "\u0000\u053f\u0540\u0006\u00a8\f\u0000\u0540\u0160\u0001\u0000\u0000\u0000"+ - "\u0541\u0542\u0003e+\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544"+ - "\u0006\u00a9\u0012\u0000\u0544\u0162\u0001\u0000\u0000\u0000\u0545\u0546"+ - "\u0003i-\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00aa"+ - "\u0016\u0000\u0548\u0164\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u010d"+ - "\u007f\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ab"+ - " \u0000\u054c\u054d\u0006\u00ab!\u0000\u054d\u0166\u0001\u0000\u0000\u0000"+ - "\u054e\u054f\u0003\u00d1a\u0000\u054f\u0550\u0001\u0000\u0000\u0000\u0550"+ - "\u0551\u0006\u00ac\u0014\u0000\u0551\u0168\u0001\u0000\u0000\u0000\u0552"+ - "\u0553\u0003U#\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006"+ - "\u00ad\u0015\u0000\u0555\u016a\u0001\u0000\u0000\u0000\u0556\u0557\u0003"+ - "7\u0014\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00ae"+ - "\n\u0000\u0559\u016c\u0001\u0000\u0000\u0000\u055a\u055b\u00039\u0015"+ - "\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00af\n\u0000"+ - "\u055d\u016e\u0001\u0000\u0000\u0000\u055e\u055f\u0003;\u0016\u0000\u055f"+ - "\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00b0\n\u0000\u0561\u0170"+ - "\u0001\u0000\u0000\u0000\u0562\u0563\u0003?\u0018\u0000\u0563\u0564\u0001"+ - "\u0000\u0000\u0000\u0564\u0565\u0006\u00b1\u0010\u0000\u0565\u0566\u0006"+ - "\u00b1\u000b\u0000\u0566\u0567\u0006\u00b1\u000b\u0000\u0567\u0172\u0001"+ - "\u0000\u0000\u0000\u0568\u0569\u0003e+\u0000\u0569\u056a\u0001\u0000\u0000"+ - "\u0000\u056a\u056b\u0006\u00b2\u0012\u0000\u056b\u0174\u0001\u0000\u0000"+ - "\u0000\u056c\u056d\u0003i-\u0000\u056d\u056e\u0001\u0000\u0000\u0000\u056e"+ - "\u056f\u0006\u00b3\u0016\u0000\u056f\u0176\u0001\u0000\u0000\u0000\u0570"+ - "\u0571\u0003\u00ebn\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572\u0573"+ - "\u0006\u00b4\u0019\u0000\u0573\u0178\u0001\u0000\u0000\u0000\u0574\u0575"+ - "\u00037\u0014\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ - "\u00b5\n\u0000\u0577\u017a\u0001\u0000\u0000\u0000\u0578\u0579\u00039"+ - "\u0015\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b6"+ - "\n\u0000\u057b\u017c\u0001\u0000\u0000\u0000\u057c\u057d\u0003;\u0016"+ - "\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00b7\n\u0000"+ - "\u057f\u017e\u0001\u0000\u0000\u0000\u0580\u0581\u0003?\u0018\u0000\u0581"+ - "\u0582\u0001\u0000\u0000\u0000\u0582\u0583\u0006\u00b8\u0010\u0000\u0583"+ - "\u0584\u0006\u00b8\u000b\u0000\u0584\u0180\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u0003\u00d1a\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ - "\u0006\u00b9\u0014\u0000\u0588\u0589\u0006\u00b9\u000b\u0000\u0589\u058a"+ - "\u0006\u00b9\"\u0000\u058a\u0182\u0001\u0000\u0000\u0000\u058b\u058c\u0003"+ - "U#\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00ba\u0015"+ - "\u0000\u058e\u058f\u0006\u00ba\u000b\u0000\u058f\u0590\u0006\u00ba\"\u0000"+ - "\u0590\u0184\u0001\u0000\u0000\u0000\u0591\u0592\u00037\u0014\u0000\u0592"+ - "\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bb\n\u0000\u0594\u0186"+ - 
"\u0001\u0000\u0000\u0000\u0595\u0596\u00039\u0015\u0000\u0596\u0597\u0001"+ - "\u0000\u0000\u0000\u0597\u0598\u0006\u00bc\n\u0000\u0598\u0188\u0001\u0000"+ - "\u0000\u0000\u0599\u059a\u0003;\u0016\u0000\u059a\u059b\u0001\u0000\u0000"+ - "\u0000\u059b\u059c\u0006\u00bd\n\u0000\u059c\u018a\u0001\u0000\u0000\u0000"+ - "\u059d\u059e\u0003=\u0017\u0000\u059e\u059f\u0001\u0000\u0000\u0000\u059f"+ - "\u05a0\u0006\u00be\f\u0000\u05a0\u05a1\u0006\u00be\u000b\u0000\u05a1\u05a2"+ - "\u0006\u00be\t\u0000\u05a2\u018c\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003"+ - "e+\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00bf\u0012"+ - "\u0000\u05a6\u05a7\u0006\u00bf\u000b\u0000\u05a7\u05a8\u0006\u00bf\t\u0000"+ - "\u05a8\u018e\u0001\u0000\u0000\u0000\u05a9\u05aa\u00037\u0014\u0000\u05aa"+ - "\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c0\n\u0000\u05ac\u0190"+ - "\u0001\u0000\u0000\u0000\u05ad\u05ae\u00039\u0015\u0000\u05ae\u05af\u0001"+ - "\u0000\u0000\u0000\u05af\u05b0\u0006\u00c1\n\u0000\u05b0\u0192\u0001\u0000"+ - "\u0000\u0000\u05b1\u05b2\u0003;\u0016\u0000\u05b2\u05b3\u0001\u0000\u0000"+ - "\u0000\u05b3\u05b4\u0006\u00c2\n\u0000\u05b4\u0194\u0001\u0000\u0000\u0000"+ - "\u05b5\u05b6\u0003\u00afP\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7"+ - "\u05b8\u0006\u00c3\u000b\u0000\u05b8\u05b9\u0006\u00c3\u0000\u0000\u05b9"+ - "\u05ba\u0006\u00c3\u001e\u0000\u05ba\u0196\u0001\u0000\u0000\u0000\u05bb"+ - "\u05bc\u0003\u00abN\u0000\u05bc\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be"+ - "\u0006\u00c4\u000b\u0000\u05be\u05bf\u0006\u00c4\u0000\u0000\u05bf\u05c0"+ - "\u0006\u00c4\u001f\u0000\u05c0\u0198\u0001\u0000\u0000\u0000\u05c1\u05c2"+ - "\u0003[&\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00c5"+ - "\u000b\u0000\u05c4\u05c5\u0006\u00c5\u0000\u0000\u05c5\u05c6\u0006\u00c5"+ - "#\u0000\u05c6\u019a\u0001\u0000\u0000\u0000\u05c7\u05c8\u0003?\u0018\u0000"+ - "\u05c8\u05c9\u0001\u0000\u0000\u0000\u05c9\u05ca\u0006\u00c6\u0010\u0000"+ - "\u05ca\u05cb\u0006\u00c6\u000b\u0000\u05cb\u019c\u0001\u0000\u0000\u0000"+ - "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u0245\u024f\u0253\u0256\u025f\u0261\u026c\u0281\u0286\u028f\u0296\u029b"+ - "\u029d\u02a8\u02b0\u02b3\u02b5\u02ba\u02bf\u02c5\u02cc\u02d1\u02d7\u02da"+ - "\u02e2\u02e6\u0369\u036e\u0375\u0377\u0387\u038c\u0391\u0393\u0399\u03e6"+ - "\u03eb\u041c\u0420\u0425\u042a\u042f\u0431\u0435\u0437\u048e\u0492\u0497"+ - "\u0528\u052a$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ - "\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ - "\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ - "\u0007\u0018\u0000\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007"+ - "\u0019\u0000\u0007B\u0000\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007"+ - "\u001a\u0000\u0007$\u0000\u00070\u0000\u0007@\u0000\u0007P\u0000\u0005"+ - "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ - "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001d\u0000"; + "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ + "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ + "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ + "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ + "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ + 
"\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ + "\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ + "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ + "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ + "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ + "\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ + "\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001"+ + "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001"+ + "\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001"+ + "\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ + "\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001"+ + "\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001"+ + "\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001"+ + "\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ + "\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001"+ + "\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001"+ + "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ + "\u00d5\u0001\u00d5\u0002\u02ad\u02f4\u0000\u00d6\u0010\u0001\u0012\u0002"+ + "\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b "+ + "\t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148"+ + "\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001cH\u001dJ\u001e"+ + "L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000"+ + "`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u0088"+ + "3\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c"+ + "=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac\u0000\u00ae\u0000"+ + "\u00b0E\u00b2F\u00b4G\u00b6H\u00b8\u0000\u00baI\u00bcJ\u00beK\u00c0L\u00c2"+ + "\u0000\u00c4\u0000\u00c6M\u00c8N\u00caO\u00cc\u0000\u00ce\u0000\u00d0"+ + "\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8P\u00da\u0000\u00dcQ\u00de"+ + "\u0000\u00e0\u0000\u00e2R\u00e4S\u00e6T\u00e8\u0000\u00ea\u0000\u00ec"+ + "\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6U\u00f8V\u00fa"+ + "W\u00fcX\u00fe\u0000\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108"+ + "\u0000\u010aY\u010c\u0000\u010eZ\u0110[\u0112\\\u0114\u0000\u0116\u0000"+ + "\u0118]\u011a^\u011c\u0000\u011e_\u0120\u0000\u0122`\u0124a\u0126b\u0128"+ + "\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134"+ + "\u0000\u0136\u0000\u0138\u0000\u013ac\u013cd\u013ee\u0140\u0000\u0142"+ + "\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a\u0000\u014cf\u014eg\u0150"+ + "h\u0152\u0000\u0154i\u0156j\u0158k\u015al\u015c\u0000\u015e\u0000\u0160"+ + "m\u0162n\u0164o\u0166p\u0168\u0000\u016a\u0000\u016c\u0000\u016e\u0000"+ + "\u0170\u0000\u0172\u0000\u0174\u0000\u0176q\u0178r\u017as\u017c\u0000"+ + "\u017e\u0000\u0180\u0000\u0182\u0000\u0184t\u0186u\u0188v\u018a\u0000"+ + "\u018c\u0000\u018e\u0000\u0190\u0000\u0192w\u0194\u0000\u0196\u0000\u0198"+ + "x\u019ay\u019cz\u019e\u0000\u01a0\u0000\u01a2\u0000\u01a4{\u01a6|\u01a8"+ + "}\u01aa\u0000\u01ac\u0000\u01ae~\u01b0\u007f\u01b2\u0080\u01b4\u0000\u01b6"+ + "\u0000\u01b8\u0000\u01ba\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ + "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000"+ + 
"IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002"+ + "\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000"+ + "HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002"+ + "\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000"+ + "WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002"+ + "\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000"+ + "\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001"+ + "\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,/"+ + "/::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0663"+ + "\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000"+ + "\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000"+ + "\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000"+ + "\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000"+ + "\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000"+ + "$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001"+ + "\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000"+ + "\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u0000"+ + "2\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001"+ + "\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000"+ + "\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000"+ + "@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000\u0000D\u0001"+ + "\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H\u0001\u0000\u0000"+ + "\u0000\u0001J\u0001\u0000\u0000\u0000\u0001`\u0001\u0000\u0000\u0000\u0001"+ + "b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000\u0001f\u0001"+ + "\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j\u0001\u0000\u0000"+ + "\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000\u0000\u0000\u0001"+ + "p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000\u0001t\u0001"+ + "\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x\u0001\u0000\u0000"+ + "\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000\u0000\u0000\u0001"+ + "~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000\u0000\u0001\u0082"+ + "\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000\u0000\u0001\u0086"+ + "\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000\u0000\u0001\u008a"+ + "\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000\u0000\u0001\u008e"+ + "\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000\u0000\u0001\u0092"+ + "\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000\u0000\u0001\u0096"+ + "\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000\u0000\u0001\u009a"+ + "\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000\u0000\u0001\u009e"+ + "\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2"+ + "\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6"+ + "\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa"+ + "\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae"+ + "\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2"+ + "\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6"+ + "\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000\u0000\u0001\u00bc"+ + "\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000\u0000\u0001\u00c0"+ + 
"\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4"+ + "\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8"+ + "\u0001\u0000\u0000\u0000\u0002\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc"+ + "\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0"+ + "\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4"+ + "\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8"+ + "\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000\u0000\u0003\u00de"+ + "\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2"+ + "\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6"+ + "\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea"+ + "\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee"+ + "\u0001\u0000\u0000\u0000\u0004\u00f0\u0001\u0000\u0000\u0000\u0004\u00f6"+ + "\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa"+ + "\u0001\u0000\u0000\u0000\u0004\u00fc\u0001\u0000\u0000\u0000\u0005\u00fe"+ + "\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000\u0000\u0005\u0102"+ + "\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000\u0000\u0005\u0106"+ + "\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000\u0000\u0005\u010a"+ + "\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000\u0000\u0005\u010e"+ + "\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000\u0000\u0005\u0112"+ + "\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116"+ + "\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a"+ + "\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000\u0000\u0006\u0120"+ + "\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000\u0000\u0006\u0124"+ + "\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000\u0000\u0007\u0128"+ + "\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000\u0000\u0007\u012c"+ + "\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000\u0000\u0007\u0130"+ + "\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000\u0000\u0007\u0134"+ + "\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000\u0000\u0007\u0138"+ + "\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c"+ + "\u0001\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000\u0000\b\u0140\u0001"+ + "\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000"+ + "\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000"+ + "\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000"+ + "\b\u014e\u0001\u0000\u0000\u0000\b\u0150\u0001\u0000\u0000\u0000\t\u0152"+ + "\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001"+ + "\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000"+ + "\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000"+ + "\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000"+ + "\n\u0164\u0001\u0000\u0000\u0000\n\u0166\u0001\u0000\u0000\u0000\u000b"+ + "\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000\u0000\u000b"+ + "\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000\u0000\u000b"+ + "\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000\u0000\u000b"+ + "\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000\u0000\u000b"+ + "\u0178\u0001\u0000\u0000\u0000\u000b\u017a\u0001\u0000\u0000\u0000\f\u017c"+ + "\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001"+ + 
"\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000"+ + "\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000"+ + "\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000"+ + "\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192"+ + "\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001"+ + "\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000"+ + "\u0000\u0000\r\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001\u0000\u0000"+ + "\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001\u0000\u0000"+ + "\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001\u0000\u0000"+ + "\u0000\u000e\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001\u0000\u0000"+ + "\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001\u0000\u0000"+ + "\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001\u0000\u0000"+ + "\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001\u0000\u0000"+ + "\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u000f\u01ba\u0001\u0000\u0000"+ + "\u0000\u0010\u01bc\u0001\u0000\u0000\u0000\u0012\u01c6\u0001\u0000\u0000"+ + "\u0000\u0014\u01cd\u0001\u0000\u0000\u0000\u0016\u01d6\u0001\u0000\u0000"+ + "\u0000\u0018\u01dd\u0001\u0000\u0000\u0000\u001a\u01e7\u0001\u0000\u0000"+ + "\u0000\u001c\u01ee\u0001\u0000\u0000\u0000\u001e\u01f5\u0001\u0000\u0000"+ + "\u0000 \u01fc\u0001\u0000\u0000\u0000\"\u0204\u0001\u0000\u0000\u0000"+ + "$\u0210\u0001\u0000\u0000\u0000&\u0219\u0001\u0000\u0000\u0000(\u021f"+ + "\u0001\u0000\u0000\u0000*\u0226\u0001\u0000\u0000\u0000,\u022d\u0001\u0000"+ + "\u0000\u0000.\u0235\u0001\u0000\u0000\u00000\u023d\u0001\u0000\u0000\u0000"+ + "2\u024c\u0001\u0000\u0000\u00004\u0258\u0001\u0000\u0000\u00006\u0263"+ + "\u0001\u0000\u0000\u00008\u026b\u0001\u0000\u0000\u0000:\u0273\u0001\u0000"+ + "\u0000\u0000<\u027b\u0001\u0000\u0000\u0000>\u0284\u0001\u0000\u0000\u0000"+ + "@\u028f\u0001\u0000\u0000\u0000B\u0295\u0001\u0000\u0000\u0000D\u02a6"+ + "\u0001\u0000\u0000\u0000F\u02b6\u0001\u0000\u0000\u0000H\u02bc\u0001\u0000"+ + "\u0000\u0000J\u02be\u0001\u0000\u0000\u0000L\u02c2\u0001\u0000\u0000\u0000"+ + "N\u02c4\u0001\u0000\u0000\u0000P\u02c6\u0001\u0000\u0000\u0000R\u02c9"+ + "\u0001\u0000\u0000\u0000T\u02cb\u0001\u0000\u0000\u0000V\u02d4\u0001\u0000"+ + "\u0000\u0000X\u02d6\u0001\u0000\u0000\u0000Z\u02db\u0001\u0000\u0000\u0000"+ + "\\\u02dd\u0001\u0000\u0000\u0000^\u02e2\u0001\u0000\u0000\u0000`\u0301"+ + "\u0001\u0000\u0000\u0000b\u0304\u0001\u0000\u0000\u0000d\u0332\u0001\u0000"+ + "\u0000\u0000f\u0334\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000"+ + "j\u033b\u0001\u0000\u0000\u0000l\u033f\u0001\u0000\u0000\u0000n\u0341"+ + "\u0001\u0000\u0000\u0000p\u0344\u0001\u0000\u0000\u0000r\u0346\u0001\u0000"+ + "\u0000\u0000t\u034b\u0001\u0000\u0000\u0000v\u034d\u0001\u0000\u0000\u0000"+ + "x\u0353\u0001\u0000\u0000\u0000z\u0359\u0001\u0000\u0000\u0000|\u035c"+ + "\u0001\u0000\u0000\u0000~\u035f\u0001\u0000\u0000\u0000\u0080\u0364\u0001"+ + "\u0000\u0000\u0000\u0082\u0369\u0001\u0000\u0000\u0000\u0084\u036b\u0001"+ + "\u0000\u0000\u0000\u0086\u036f\u0001\u0000\u0000\u0000\u0088\u0374\u0001"+ + "\u0000\u0000\u0000\u008a\u037a\u0001\u0000\u0000\u0000\u008c\u037d\u0001"+ + "\u0000\u0000\u0000\u008e\u037f\u0001\u0000\u0000\u0000\u0090\u0385\u0001"+ + "\u0000\u0000\u0000\u0092\u0387\u0001\u0000\u0000\u0000\u0094\u038c\u0001"+ + "\u0000\u0000\u0000\u0096\u038f\u0001\u0000\u0000\u0000\u0098\u0392\u0001"+ + 
"\u0000\u0000\u0000\u009a\u0395\u0001\u0000\u0000\u0000\u009c\u0397\u0001"+ + "\u0000\u0000\u0000\u009e\u039a\u0001\u0000\u0000\u0000\u00a0\u039c\u0001"+ + "\u0000\u0000\u0000\u00a2\u039f\u0001\u0000\u0000\u0000\u00a4\u03a1\u0001"+ + "\u0000\u0000\u0000\u00a6\u03a3\u0001\u0000\u0000\u0000\u00a8\u03a5\u0001"+ + "\u0000\u0000\u0000\u00aa\u03a7\u0001\u0000\u0000\u0000\u00ac\u03a9\u0001"+ + "\u0000\u0000\u0000\u00ae\u03ae\u0001\u0000\u0000\u0000\u00b0\u03c3\u0001"+ + "\u0000\u0000\u0000\u00b2\u03c5\u0001\u0000\u0000\u0000\u00b4\u03ca\u0001"+ + "\u0000\u0000\u0000\u00b6\u03df\u0001\u0000\u0000\u0000\u00b8\u03e1\u0001"+ + "\u0000\u0000\u0000\u00ba\u03e9\u0001\u0000\u0000\u0000\u00bc\u03eb\u0001"+ + "\u0000\u0000\u0000\u00be\u03ef\u0001\u0000\u0000\u0000\u00c0\u03f3\u0001"+ + "\u0000\u0000\u0000\u00c2\u03f7\u0001\u0000\u0000\u0000\u00c4\u03fc\u0001"+ + "\u0000\u0000\u0000\u00c6\u0401\u0001\u0000\u0000\u0000\u00c8\u0405\u0001"+ + "\u0000\u0000\u0000\u00ca\u0409\u0001\u0000\u0000\u0000\u00cc\u040d\u0001"+ + "\u0000\u0000\u0000\u00ce\u0412\u0001\u0000\u0000\u0000\u00d0\u0416\u0001"+ + "\u0000\u0000\u0000\u00d2\u041a\u0001\u0000\u0000\u0000\u00d4\u041e\u0001"+ + "\u0000\u0000\u0000\u00d6\u0422\u0001\u0000\u0000\u0000\u00d8\u0426\u0001"+ + "\u0000\u0000\u0000\u00da\u0432\u0001\u0000\u0000\u0000\u00dc\u0435\u0001"+ + "\u0000\u0000\u0000\u00de\u0439\u0001\u0000\u0000\u0000\u00e0\u043d\u0001"+ + "\u0000\u0000\u0000\u00e2\u0441\u0001\u0000\u0000\u0000\u00e4\u0445\u0001"+ + "\u0000\u0000\u0000\u00e6\u0449\u0001\u0000\u0000\u0000\u00e8\u044d\u0001"+ + "\u0000\u0000\u0000\u00ea\u0452\u0001\u0000\u0000\u0000\u00ec\u0456\u0001"+ + "\u0000\u0000\u0000\u00ee\u045a\u0001\u0000\u0000\u0000\u00f0\u045f\u0001"+ + "\u0000\u0000\u0000\u00f2\u0468\u0001\u0000\u0000\u0000\u00f4\u047d\u0001"+ + "\u0000\u0000\u0000\u00f6\u0481\u0001\u0000\u0000\u0000\u00f8\u0485\u0001"+ + "\u0000\u0000\u0000\u00fa\u0489\u0001\u0000\u0000\u0000\u00fc\u048d\u0001"+ + "\u0000\u0000\u0000\u00fe\u0491\u0001\u0000\u0000\u0000\u0100\u0496\u0001"+ + "\u0000\u0000\u0000\u0102\u049a\u0001\u0000\u0000\u0000\u0104\u049e\u0001"+ + "\u0000\u0000\u0000\u0106\u04a2\u0001\u0000\u0000\u0000\u0108\u04a7\u0001"+ + "\u0000\u0000\u0000\u010a\u04ac\u0001\u0000\u0000\u0000\u010c\u04af\u0001"+ + "\u0000\u0000\u0000\u010e\u04b3\u0001\u0000\u0000\u0000\u0110\u04b7\u0001"+ + "\u0000\u0000\u0000\u0112\u04bb\u0001\u0000\u0000\u0000\u0114\u04bf\u0001"+ + "\u0000\u0000\u0000\u0116\u04c4\u0001\u0000\u0000\u0000\u0118\u04c9\u0001"+ + "\u0000\u0000\u0000\u011a\u04ce\u0001\u0000\u0000\u0000\u011c\u04d5\u0001"+ + "\u0000\u0000\u0000\u011e\u04de\u0001\u0000\u0000\u0000\u0120\u04e5\u0001"+ + "\u0000\u0000\u0000\u0122\u04e9\u0001\u0000\u0000\u0000\u0124\u04ed\u0001"+ + "\u0000\u0000\u0000\u0126\u04f1\u0001\u0000\u0000\u0000\u0128\u04f5\u0001"+ + "\u0000\u0000\u0000\u012a\u04fb\u0001\u0000\u0000\u0000\u012c\u04ff\u0001"+ + "\u0000\u0000\u0000\u012e\u0503\u0001\u0000\u0000\u0000\u0130\u0507\u0001"+ + "\u0000\u0000\u0000\u0132\u050b\u0001\u0000\u0000\u0000\u0134\u050f\u0001"+ + "\u0000\u0000\u0000\u0136\u0513\u0001\u0000\u0000\u0000\u0138\u0518\u0001"+ + "\u0000\u0000\u0000\u013a\u051d\u0001\u0000\u0000\u0000\u013c\u0521\u0001"+ + "\u0000\u0000\u0000\u013e\u0525\u0001\u0000\u0000\u0000\u0140\u0529\u0001"+ + "\u0000\u0000\u0000\u0142\u052e\u0001\u0000\u0000\u0000\u0144\u0532\u0001"+ + "\u0000\u0000\u0000\u0146\u0537\u0001\u0000\u0000\u0000\u0148\u053c\u0001"+ + "\u0000\u0000\u0000\u014a\u0540\u0001\u0000\u0000\u0000\u014c\u0544\u0001"+ + 
"\u0000\u0000\u0000\u014e\u0548\u0001\u0000\u0000\u0000\u0150\u054c\u0001"+ + "\u0000\u0000\u0000\u0152\u0550\u0001\u0000\u0000\u0000\u0154\u0555\u0001"+ + "\u0000\u0000\u0000\u0156\u055a\u0001\u0000\u0000\u0000\u0158\u055e\u0001"+ + "\u0000\u0000\u0000\u015a\u0562\u0001\u0000\u0000\u0000\u015c\u0566\u0001"+ + "\u0000\u0000\u0000\u015e\u056b\u0001\u0000\u0000\u0000\u0160\u0574\u0001"+ + "\u0000\u0000\u0000\u0162\u0578\u0001\u0000\u0000\u0000\u0164\u057c\u0001"+ + "\u0000\u0000\u0000\u0166\u0580\u0001\u0000\u0000\u0000\u0168\u0584\u0001"+ + "\u0000\u0000\u0000\u016a\u0589\u0001\u0000\u0000\u0000\u016c\u058d\u0001"+ + "\u0000\u0000\u0000\u016e\u0591\u0001\u0000\u0000\u0000\u0170\u0595\u0001"+ + "\u0000\u0000\u0000\u0172\u059a\u0001\u0000\u0000\u0000\u0174\u059e\u0001"+ + "\u0000\u0000\u0000\u0176\u05a2\u0001\u0000\u0000\u0000\u0178\u05a6\u0001"+ + "\u0000\u0000\u0000\u017a\u05aa\u0001\u0000\u0000\u0000\u017c\u05ae\u0001"+ + "\u0000\u0000\u0000\u017e\u05b4\u0001\u0000\u0000\u0000\u0180\u05b8\u0001"+ + "\u0000\u0000\u0000\u0182\u05bc\u0001\u0000\u0000\u0000\u0184\u05c0\u0001"+ + "\u0000\u0000\u0000\u0186\u05c4\u0001\u0000\u0000\u0000\u0188\u05c8\u0001"+ + "\u0000\u0000\u0000\u018a\u05cc\u0001\u0000\u0000\u0000\u018c\u05d1\u0001"+ + "\u0000\u0000\u0000\u018e\u05d5\u0001\u0000\u0000\u0000\u0190\u05d9\u0001"+ + "\u0000\u0000\u0000\u0192\u05df\u0001\u0000\u0000\u0000\u0194\u05e8\u0001"+ + "\u0000\u0000\u0000\u0196\u05ec\u0001\u0000\u0000\u0000\u0198\u05f0\u0001"+ + "\u0000\u0000\u0000\u019a\u05f4\u0001\u0000\u0000\u0000\u019c\u05f8\u0001"+ + "\u0000\u0000\u0000\u019e\u05fc\u0001\u0000\u0000\u0000\u01a0\u0601\u0001"+ + "\u0000\u0000\u0000\u01a2\u0607\u0001\u0000\u0000\u0000\u01a4\u060d\u0001"+ + "\u0000\u0000\u0000\u01a6\u0611\u0001\u0000\u0000\u0000\u01a8\u0615\u0001"+ + "\u0000\u0000\u0000\u01aa\u0619\u0001\u0000\u0000\u0000\u01ac\u061f\u0001"+ + "\u0000\u0000\u0000\u01ae\u0625\u0001\u0000\u0000\u0000\u01b0\u0629\u0001"+ + "\u0000\u0000\u0000\u01b2\u062d\u0001\u0000\u0000\u0000\u01b4\u0631\u0001"+ + "\u0000\u0000\u0000\u01b6\u0637\u0001\u0000\u0000\u0000\u01b8\u063d\u0001"+ + "\u0000\u0000\u0000\u01ba\u0643\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007"+ + "\u0000\u0000\u0000\u01bd\u01be\u0007\u0001\u0000\u0000\u01be\u01bf\u0007"+ + "\u0002\u0000\u0000\u01bf\u01c0\u0007\u0002\u0000\u0000\u01c0\u01c1\u0007"+ + "\u0003\u0000\u0000\u01c1\u01c2\u0007\u0004\u0000\u0000\u01c2\u01c3\u0007"+ + "\u0005\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006"+ + "\u0000\u0000\u0000\u01c5\u0011\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007"+ + "\u0000\u0000\u0000\u01c7\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007"+ + "\u0007\u0000\u0000\u01c9\u01ca\u0007\b\u0000\u0000\u01ca\u01cb\u0001\u0000"+ + "\u0000\u0000\u01cb\u01cc\u0006\u0001\u0001\u0000\u01cc\u0013\u0001\u0000"+ + "\u0000\u0000\u01cd\u01ce\u0007\u0003\u0000\u0000\u01ce\u01cf\u0007\t\u0000"+ + "\u0000\u01cf\u01d0\u0007\u0006\u0000\u0000\u01d0\u01d1\u0007\u0001\u0000"+ + "\u0000\u01d1\u01d2\u0007\u0004\u0000\u0000\u01d2\u01d3\u0007\n\u0000\u0000"+ + "\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0002\u0002\u0000"+ + "\u01d5\u0015\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000"+ + "\u01d7\u01d8\u0007\u000b\u0000\u0000\u01d8\u01d9\u0007\f\u0000\u0000\u01d9"+ + "\u01da\u0007\r\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ + "\u0006\u0003\u0000\u0000\u01dc\u0017\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0007\u0003\u0000\u0000\u01de\u01df\u0007\u000e\u0000\u0000\u01df\u01e0"+ + 
"\u0007\b\u0000\u0000\u01e0\u01e1\u0007\r\u0000\u0000\u01e1\u01e2\u0007"+ + "\f\u0000\u0000\u01e2\u01e3\u0007\u0001\u0000\u0000\u01e3\u01e4\u0007\t"+ + "\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e6\u0006\u0004"+ + "\u0003\u0000\u01e6\u0019\u0001\u0000\u0000\u0000\u01e7\u01e8\u0007\u000f"+ + "\u0000\u0000\u01e8\u01e9\u0007\u0006\u0000\u0000\u01e9\u01ea\u0007\u0007"+ + "\u0000\u0000\u01ea\u01eb\u0007\u0010\u0000\u0000\u01eb\u01ec\u0001\u0000"+ + "\u0000\u0000\u01ec\u01ed\u0006\u0005\u0004\u0000\u01ed\u001b\u0001\u0000"+ + "\u0000\u0000\u01ee\u01ef\u0007\u0011\u0000\u0000\u01ef\u01f0\u0007\u0006"+ + "\u0000\u0000\u01f0\u01f1\u0007\u0007\u0000\u0000\u01f1\u01f2\u0007\u0012"+ + "\u0000\u0000\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u01f4\u0006\u0006"+ + "\u0000\u0000\u01f4\u001d\u0001\u0000\u0000\u0000\u01f5\u01f6\u0007\u0012"+ + "\u0000\u0000\u01f6\u01f7\u0007\u0003\u0000\u0000\u01f7\u01f8\u0007\u0003"+ + "\u0000\u0000\u01f8\u01f9\u0007\b\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000"+ + "\u0000\u01fa\u01fb\u0006\u0007\u0001\u0000\u01fb\u001f\u0001\u0000\u0000"+ + "\u0000\u01fc\u01fd\u0007\r\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000\u0000"+ + "\u01fe\u01ff\u0007\u0010\u0000\u0000\u01ff\u0200\u0007\u0001\u0000\u0000"+ + "\u0200\u0201\u0007\u0005\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0006\b\u0000\u0000\u0203!\u0001\u0000\u0000\u0000\u0204"+ + "\u0205\u0007\u0010\u0000\u0000\u0205\u0206\u0007\u000b\u0000\u0000\u0206"+ + "\u0207\u0005_\u0000\u0000\u0207\u0208\u0007\u0003\u0000\u0000\u0208\u0209"+ + "\u0007\u000e\u0000\u0000\u0209\u020a\u0007\b\u0000\u0000\u020a\u020b\u0007"+ + "\f\u0000\u0000\u020b\u020c\u0007\t\u0000\u0000\u020c\u020d\u0007\u0000"+ + "\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000\u020e\u020f\u0006\t\u0005"+ + "\u0000\u020f#\u0001\u0000\u0000\u0000\u0210\u0211\u0007\u0006\u0000\u0000"+ + "\u0211\u0212\u0007\u0003\u0000\u0000\u0212\u0213\u0007\t\u0000\u0000\u0213"+ + "\u0214\u0007\f\u0000\u0000\u0214\u0215\u0007\u0010\u0000\u0000\u0215\u0216"+ + "\u0007\u0003\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000\u0217\u0218"+ + "\u0006\n\u0006\u0000\u0218%\u0001\u0000\u0000\u0000\u0219\u021a\u0007"+ + "\u0006\u0000\u0000\u021a\u021b\u0007\u0007\u0000\u0000\u021b\u021c\u0007"+ + "\u0013\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u021e\u0006"+ + "\u000b\u0000\u0000\u021e\'\u0001\u0000\u0000\u0000\u021f\u0220\u0007\u0002"+ + "\u0000\u0000\u0220\u0221\u0007\n\u0000\u0000\u0221\u0222\u0007\u0007\u0000"+ + "\u0000\u0222\u0223\u0007\u0013\u0000\u0000\u0223\u0224\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0006\f\u0007\u0000\u0225)\u0001\u0000\u0000\u0000"+ + "\u0226\u0227\u0007\u0002\u0000\u0000\u0227\u0228\u0007\u0007\u0000\u0000"+ + "\u0228\u0229\u0007\u0006\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000"+ + "\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0006\r\u0000\u0000\u022c"+ + "+\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0002\u0000\u0000\u022e\u022f"+ + "\u0007\u0005\u0000\u0000\u022f\u0230\u0007\f\u0000\u0000\u0230\u0231\u0007"+ + "\u0005\u0000\u0000\u0231\u0232\u0007\u0002\u0000\u0000\u0232\u0233\u0001"+ + "\u0000\u0000\u0000\u0233\u0234\u0006\u000e\u0000\u0000\u0234-\u0001\u0000"+ + "\u0000\u0000\u0235\u0236\u0007\u0013\u0000\u0000\u0236\u0237\u0007\n\u0000"+ + "\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0007\u0006\u0000"+ + "\u0000\u0239\u023a\u0007\u0003\u0000\u0000\u023a\u023b\u0001\u0000\u0000"+ + "\u0000\u023b\u023c\u0006\u000f\u0000\u0000\u023c/\u0001\u0000\u0000\u0000"+ + 
"\u023d\u023e\u0004\u0010\u0000\u0000\u023e\u023f\u0007\u0001\u0000\u0000"+ + "\u023f\u0240\u0007\t\u0000\u0000\u0240\u0241\u0007\r\u0000\u0000\u0241"+ + "\u0242\u0007\u0001\u0000\u0000\u0242\u0243\u0007\t\u0000\u0000\u0243\u0244"+ + "\u0007\u0003\u0000\u0000\u0244\u0245\u0007\u0002\u0000\u0000\u0245\u0246"+ + "\u0007\u0005\u0000\u0000\u0246\u0247\u0007\f\u0000\u0000\u0247\u0248\u0007"+ + "\u0005\u0000\u0000\u0248\u0249\u0007\u0002\u0000\u0000\u0249\u024a\u0001"+ + "\u0000\u0000\u0000\u024a\u024b\u0006\u0010\u0000\u0000\u024b1\u0001\u0000"+ + "\u0000\u0000\u024c\u024d\u0004\u0011\u0001\u0000\u024d\u024e\u0007\r\u0000"+ + "\u0000\u024e\u024f\u0007\u0007\u0000\u0000\u024f\u0250\u0007\u0007\u0000"+ + "\u0000\u0250\u0251\u0007\u0012\u0000\u0000\u0251\u0252\u0007\u0014\u0000"+ + "\u0000\u0252\u0253\u0007\b\u0000\u0000\u0253\u0254\u0005_\u0000\u0000"+ + "\u0254\u0255\u0005\u8001\uf414\u0000\u0000\u0255\u0256\u0001\u0000\u0000"+ + "\u0000\u0256\u0257\u0006\u0011\b\u0000\u02573\u0001\u0000\u0000\u0000"+ + "\u0258\u0259\u0004\u0012\u0002\u0000\u0259\u025a\u0007\u0010\u0000\u0000"+ + "\u025a\u025b\u0007\u0003\u0000\u0000\u025b\u025c\u0007\u0005\u0000\u0000"+ + "\u025c\u025d\u0007\u0006\u0000\u0000\u025d\u025e\u0007\u0001\u0000\u0000"+ + "\u025e\u025f\u0007\u0004\u0000\u0000\u025f\u0260\u0007\u0002\u0000\u0000"+ + "\u0260\u0261\u0001\u0000\u0000\u0000\u0261\u0262\u0006\u0012\t\u0000\u0262"+ + "5\u0001\u0000\u0000\u0000\u0263\u0264\u0004\u0013\u0003\u0000\u0264\u0265"+ + "\u0007\u0015\u0000\u0000\u0265\u0266\u0007\u0007\u0000\u0000\u0266\u0267"+ + "\u0007\u0001\u0000\u0000\u0267\u0268\u0007\t\u0000\u0000\u0268\u0269\u0001"+ + "\u0000\u0000\u0000\u0269\u026a\u0006\u0013\n\u0000\u026a7\u0001\u0000"+ + "\u0000\u0000\u026b\u026c\u0004\u0014\u0004\u0000\u026c\u026d\u0007\u000f"+ + "\u0000\u0000\u026d\u026e\u0007\u0014\u0000\u0000\u026e\u026f\u0007\r\u0000"+ + "\u0000\u026f\u0270\u0007\r\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000"+ + "\u0271\u0272\u0006\u0014\n\u0000\u02729\u0001\u0000\u0000\u0000\u0273"+ + "\u0274\u0004\u0015\u0005\u0000\u0274\u0275\u0007\r\u0000\u0000\u0275\u0276"+ + "\u0007\u0003\u0000\u0000\u0276\u0277\u0007\u000f\u0000\u0000\u0277\u0278"+ + "\u0007\u0005\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a"+ + "\u0006\u0015\n\u0000\u027a;\u0001\u0000\u0000\u0000\u027b\u027c\u0004"+ + "\u0016\u0006\u0000\u027c\u027d\u0007\u0006\u0000\u0000\u027d\u027e\u0007"+ + "\u0001\u0000\u0000\u027e\u027f\u0007\u0011\u0000\u0000\u027f\u0280\u0007"+ + "\n\u0000\u0000\u0280\u0281\u0007\u0005\u0000\u0000\u0281\u0282\u0001\u0000"+ + "\u0000\u0000\u0282\u0283\u0006\u0016\n\u0000\u0283=\u0001\u0000\u0000"+ + "\u0000\u0284\u0285\u0004\u0017\u0007\u0000\u0285\u0286\u0007\r\u0000\u0000"+ + "\u0286\u0287\u0007\u0007\u0000\u0000\u0287\u0288\u0007\u0007\u0000\u0000"+ + "\u0288\u0289\u0007\u0012\u0000\u0000\u0289\u028a\u0007\u0014\u0000\u0000"+ + "\u028a\u028b\u0007\b\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c"+ + "\u028d\u0006\u0017\n\u0000\u028d?\u0001\u0000\u0000\u0000\u028e\u0290"+ + "\b\u0016\u0000\u0000\u028f\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001"+ + "\u0000\u0000\u0000\u0291\u028f\u0001\u0000\u0000\u0000\u0291\u0292\u0001"+ + "\u0000\u0000\u0000\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0294\u0006"+ + "\u0018\u0000\u0000\u0294A\u0001\u0000\u0000\u0000\u0295\u0296\u0005/\u0000"+ + "\u0000\u0296\u0297\u0005/\u0000\u0000\u0297\u029b\u0001\u0000\u0000\u0000"+ + "\u0298\u029a\b\u0017\u0000\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u029a"+ + 
"\u029d\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b"+ + "\u029c\u0001\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d"+ + "\u029b\u0001\u0000\u0000\u0000\u029e\u02a0\u0005\r\u0000\u0000\u029f\u029e"+ + "\u0001\u0000\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0\u02a2"+ + "\u0001\u0000\u0000\u0000\u02a1\u02a3\u0005\n\u0000\u0000\u02a2\u02a1\u0001"+ + "\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000\u0000\u02a3\u02a4\u0001"+ + "\u0000\u0000\u0000\u02a4\u02a5\u0006\u0019\u000b\u0000\u02a5C\u0001\u0000"+ + "\u0000\u0000\u02a6\u02a7\u0005/\u0000\u0000\u02a7\u02a8\u0005*\u0000\u0000"+ + "\u02a8\u02ad\u0001\u0000\u0000\u0000\u02a9\u02ac\u0003D\u001a\u0000\u02aa"+ + "\u02ac\t\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000\u0000\u02ab\u02aa"+ + "\u0001\u0000\u0000\u0000\u02ac\u02af\u0001\u0000\u0000\u0000\u02ad\u02ae"+ + "\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0"+ + "\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02b0\u02b1"+ + "\u0005*\u0000\u0000\u02b1\u02b2\u0005/\u0000\u0000\u02b2\u02b3\u0001\u0000"+ + "\u0000\u0000\u02b3\u02b4\u0006\u001a\u000b\u0000\u02b4E\u0001\u0000\u0000"+ + "\u0000\u02b5\u02b7\u0007\u0018\u0000\u0000\u02b6\u02b5\u0001\u0000\u0000"+ + "\u0000\u02b7\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001\u0000\u0000"+ + "\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000"+ + "\u0000\u02ba\u02bb\u0006\u001b\u000b\u0000\u02bbG\u0001\u0000\u0000\u0000"+ + "\u02bc\u02bd\u0005:\u0000\u0000\u02bdI\u0001\u0000\u0000\u0000\u02be\u02bf"+ + "\u0005|\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0006"+ + "\u001d\f\u0000\u02c1K\u0001\u0000\u0000\u0000\u02c2\u02c3\u0007\u0019"+ + "\u0000\u0000\u02c3M\u0001\u0000\u0000\u0000\u02c4\u02c5\u0007\u001a\u0000"+ + "\u0000\u02c5O\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005\\\u0000\u0000"+ + "\u02c7\u02c8\u0007\u001b\u0000\u0000\u02c8Q\u0001\u0000\u0000\u0000\u02c9"+ + "\u02ca\b\u001c\u0000\u0000\u02caS\u0001\u0000\u0000\u0000\u02cb\u02cd"+ + "\u0007\u0003\u0000\u0000\u02cc\u02ce\u0007\u001d\u0000\u0000\u02cd\u02cc"+ + "\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce\u02d0"+ + "\u0001\u0000\u0000\u0000\u02cf\u02d1\u0003L\u001e\u0000\u02d0\u02cf\u0001"+ + "\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02d0\u0001"+ + "\u0000\u0000\u0000\u02d2\u02d3\u0001\u0000\u0000\u0000\u02d3U\u0001\u0000"+ + "\u0000\u0000\u02d4\u02d5\u0005@\u0000\u0000\u02d5W\u0001\u0000\u0000\u0000"+ + "\u02d6\u02d7\u0005`\u0000\u0000\u02d7Y\u0001\u0000\u0000\u0000\u02d8\u02dc"+ + "\b\u001e\u0000\u0000\u02d9\u02da\u0005`\u0000\u0000\u02da\u02dc\u0005"+ + "`\u0000\u0000\u02db\u02d8\u0001\u0000\u0000\u0000\u02db\u02d9\u0001\u0000"+ + "\u0000\u0000\u02dc[\u0001\u0000\u0000\u0000\u02dd\u02de\u0005_\u0000\u0000"+ + "\u02de]\u0001\u0000\u0000\u0000\u02df\u02e3\u0003N\u001f\u0000\u02e0\u02e3"+ + "\u0003L\u001e\u0000\u02e1\u02e3\u0003\\&\u0000\u02e2\u02df\u0001\u0000"+ + "\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e1\u0001\u0000"+ + "\u0000\u0000\u02e3_\u0001\u0000\u0000\u0000\u02e4\u02e9\u0005\"\u0000"+ + "\u0000\u02e5\u02e8\u0003P \u0000\u02e6\u02e8\u0003R!\u0000\u02e7\u02e5"+ + "\u0001\u0000\u0000\u0000\u02e7\u02e6\u0001\u0000\u0000\u0000\u02e8\u02eb"+ + "\u0001\u0000\u0000\u0000\u02e9\u02e7\u0001\u0000\u0000\u0000\u02e9\u02ea"+ + "\u0001\u0000\u0000\u0000\u02ea\u02ec\u0001\u0000\u0000\u0000\u02eb\u02e9"+ + "\u0001\u0000\u0000\u0000\u02ec\u0302\u0005\"\u0000\u0000\u02ed\u02ee\u0005"+ + 
"\"\u0000\u0000\u02ee\u02ef\u0005\"\u0000\u0000\u02ef\u02f0\u0005\"\u0000"+ + "\u0000\u02f0\u02f4\u0001\u0000\u0000\u0000\u02f1\u02f3\b\u0017\u0000\u0000"+ + "\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3\u02f6\u0001\u0000\u0000\u0000"+ + "\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000"+ + "\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6\u02f4\u0001\u0000\u0000\u0000"+ + "\u02f7\u02f8\u0005\"\u0000\u0000\u02f8\u02f9\u0005\"\u0000\u0000\u02f9"+ + "\u02fa\u0005\"\u0000\u0000\u02fa\u02fc\u0001\u0000\u0000\u0000\u02fb\u02fd"+ + "\u0005\"\u0000\u0000\u02fc\u02fb\u0001\u0000\u0000\u0000\u02fc\u02fd\u0001"+ + "\u0000\u0000\u0000\u02fd\u02ff\u0001\u0000\u0000\u0000\u02fe\u0300\u0005"+ + "\"\u0000\u0000\u02ff\u02fe\u0001\u0000\u0000\u0000\u02ff\u0300\u0001\u0000"+ + "\u0000\u0000\u0300\u0302\u0001\u0000\u0000\u0000\u0301\u02e4\u0001\u0000"+ + "\u0000\u0000\u0301\u02ed\u0001\u0000\u0000\u0000\u0302a\u0001\u0000\u0000"+ + "\u0000\u0303\u0305\u0003L\u001e\u0000\u0304\u0303\u0001\u0000\u0000\u0000"+ + "\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000\u0000"+ + "\u0306\u0307\u0001\u0000\u0000\u0000\u0307c\u0001\u0000\u0000\u0000\u0308"+ + "\u030a\u0003L\u001e\u0000\u0309\u0308\u0001\u0000\u0000\u0000\u030a\u030b"+ + "\u0001\u0000\u0000\u0000\u030b\u0309\u0001\u0000\u0000\u0000\u030b\u030c"+ + "\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d\u0311"+ + "\u0003t2\u0000\u030e\u0310\u0003L\u001e\u0000\u030f\u030e\u0001\u0000"+ + "\u0000\u0000\u0310\u0313\u0001\u0000\u0000\u0000\u0311\u030f\u0001\u0000"+ + "\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0333\u0001\u0000"+ + "\u0000\u0000\u0313\u0311\u0001\u0000\u0000\u0000\u0314\u0316\u0003t2\u0000"+ + "\u0315\u0317\u0003L\u001e\u0000\u0316\u0315\u0001\u0000\u0000\u0000\u0317"+ + "\u0318\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000\u0318"+ + "\u0319\u0001\u0000\u0000\u0000\u0319\u0333\u0001\u0000\u0000\u0000\u031a"+ + "\u031c\u0003L\u001e\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031d"+ + "\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d\u031e"+ + "\u0001\u0000\u0000\u0000\u031e\u0326\u0001\u0000\u0000\u0000\u031f\u0323"+ + "\u0003t2\u0000\u0320\u0322\u0003L\u001e\u0000\u0321\u0320\u0001\u0000"+ + "\u0000\u0000\u0322\u0325\u0001\u0000\u0000\u0000\u0323\u0321\u0001\u0000"+ + "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0327\u0001\u0000"+ + "\u0000\u0000\u0325\u0323\u0001\u0000\u0000\u0000\u0326\u031f\u0001\u0000"+ + "\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000"+ + "\u0000\u0000\u0328\u0329\u0003T\"\u0000\u0329\u0333\u0001\u0000\u0000"+ + "\u0000\u032a\u032c\u0003t2\u0000\u032b\u032d\u0003L\u001e\u0000\u032c"+ + "\u032b\u0001\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e"+ + "\u032c\u0001\u0000\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f"+ + "\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0003T\"\u0000\u0331\u0333"+ + "\u0001\u0000\u0000\u0000\u0332\u0309\u0001\u0000\u0000\u0000\u0332\u0314"+ + "\u0001\u0000\u0000\u0000\u0332\u031b\u0001\u0000\u0000\u0000\u0332\u032a"+ + "\u0001\u0000\u0000\u0000\u0333e\u0001\u0000\u0000\u0000\u0334\u0335\u0007"+ + "\u001f\u0000\u0000\u0335\u0336\u0007 \u0000\u0000\u0336g\u0001\u0000\u0000"+ + "\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\t\u0000\u0000"+ + "\u0339\u033a\u0007\u0000\u0000\u0000\u033ai\u0001\u0000\u0000\u0000\u033b"+ + "\u033c\u0007\f\u0000\u0000\u033c\u033d\u0007\u0002\u0000\u0000\u033d\u033e"+ + 
"\u0007\u0004\u0000\u0000\u033ek\u0001\u0000\u0000\u0000\u033f\u0340\u0005"+ + "=\u0000\u0000\u0340m\u0001\u0000\u0000\u0000\u0341\u0342\u0005:\u0000"+ + "\u0000\u0342\u0343\u0005:\u0000\u0000\u0343o\u0001\u0000\u0000\u0000\u0344"+ + "\u0345\u0005,\u0000\u0000\u0345q\u0001\u0000\u0000\u0000\u0346\u0347\u0007"+ + "\u0000\u0000\u0000\u0347\u0348\u0007\u0003\u0000\u0000\u0348\u0349\u0007"+ + "\u0002\u0000\u0000\u0349\u034a\u0007\u0004\u0000\u0000\u034as\u0001\u0000"+ + "\u0000\u0000\u034b\u034c\u0005.\u0000\u0000\u034cu\u0001\u0000\u0000\u0000"+ + "\u034d\u034e\u0007\u000f\u0000\u0000\u034e\u034f\u0007\f\u0000\u0000\u034f"+ + "\u0350\u0007\r\u0000\u0000\u0350\u0351\u0007\u0002\u0000\u0000\u0351\u0352"+ + "\u0007\u0003\u0000\u0000\u0352w\u0001\u0000\u0000\u0000\u0353\u0354\u0007"+ + "\u000f\u0000\u0000\u0354\u0355\u0007\u0001\u0000\u0000\u0355\u0356\u0007"+ + "\u0006\u0000\u0000\u0356\u0357\u0007\u0002\u0000\u0000\u0357\u0358\u0007"+ + "\u0005\u0000\u0000\u0358y\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0001"+ + "\u0000\u0000\u035a\u035b\u0007\t\u0000\u0000\u035b{\u0001\u0000\u0000"+ + "\u0000\u035c\u035d\u0007\u0001\u0000\u0000\u035d\u035e\u0007\u0002\u0000"+ + "\u0000\u035e}\u0001\u0000\u0000\u0000\u035f\u0360\u0007\r\u0000\u0000"+ + "\u0360\u0361\u0007\f\u0000\u0000\u0361\u0362\u0007\u0002\u0000\u0000\u0362"+ + "\u0363\u0007\u0005\u0000\u0000\u0363\u007f\u0001\u0000\u0000\u0000\u0364"+ + "\u0365\u0007\r\u0000\u0000\u0365\u0366\u0007\u0001\u0000\u0000\u0366\u0367"+ + "\u0007\u0012\u0000\u0000\u0367\u0368\u0007\u0003\u0000\u0000\u0368\u0081"+ + "\u0001\u0000\u0000\u0000\u0369\u036a\u0005(\u0000\u0000\u036a\u0083\u0001"+ + "\u0000\u0000\u0000\u036b\u036c\u0007\t\u0000\u0000\u036c\u036d\u0007\u0007"+ + "\u0000\u0000\u036d\u036e\u0007\u0005\u0000\u0000\u036e\u0085\u0001\u0000"+ + "\u0000\u0000\u036f\u0370\u0007\t\u0000\u0000\u0370\u0371\u0007\u0014\u0000"+ + "\u0000\u0371\u0372\u0007\r\u0000\u0000\u0372\u0373\u0007\r\u0000\u0000"+ + "\u0373\u0087\u0001\u0000\u0000\u0000\u0374\u0375\u0007\t\u0000\u0000\u0375"+ + "\u0376\u0007\u0014\u0000\u0000\u0376\u0377\u0007\r\u0000\u0000\u0377\u0378"+ + "\u0007\r\u0000\u0000\u0378\u0379\u0007\u0002\u0000\u0000\u0379\u0089\u0001"+ + "\u0000\u0000\u0000\u037a\u037b\u0007\u0007\u0000\u0000\u037b\u037c\u0007"+ + "\u0006\u0000\u0000\u037c\u008b\u0001\u0000\u0000\u0000\u037d\u037e\u0005"+ + "?\u0000\u0000\u037e\u008d\u0001\u0000\u0000\u0000\u037f\u0380\u0007\u0006"+ + "\u0000\u0000\u0380\u0381\u0007\r\u0000\u0000\u0381\u0382\u0007\u0001\u0000"+ + "\u0000\u0382\u0383\u0007\u0012\u0000\u0000\u0383\u0384\u0007\u0003\u0000"+ + "\u0000\u0384\u008f\u0001\u0000\u0000\u0000\u0385\u0386\u0005)\u0000\u0000"+ + "\u0386\u0091\u0001\u0000\u0000\u0000\u0387\u0388\u0007\u0005\u0000\u0000"+ + "\u0388\u0389\u0007\u0006\u0000\u0000\u0389\u038a\u0007\u0014\u0000\u0000"+ + "\u038a\u038b\u0007\u0003\u0000\u0000\u038b\u0093\u0001\u0000\u0000\u0000"+ + "\u038c\u038d\u0005=\u0000\u0000\u038d\u038e\u0005=\u0000\u0000\u038e\u0095"+ + "\u0001\u0000\u0000\u0000\u038f\u0390\u0005=\u0000\u0000\u0390\u0391\u0005"+ + "~\u0000\u0000\u0391\u0097\u0001\u0000\u0000\u0000\u0392\u0393\u0005!\u0000"+ + "\u0000\u0393\u0394\u0005=\u0000\u0000\u0394\u0099\u0001\u0000\u0000\u0000"+ + "\u0395\u0396\u0005<\u0000\u0000\u0396\u009b\u0001\u0000\u0000\u0000\u0397"+ + "\u0398\u0005<\u0000\u0000\u0398\u0399\u0005=\u0000\u0000\u0399\u009d\u0001"+ + "\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u009f\u0001\u0000"+ + "\u0000\u0000\u039c\u039d\u0005>\u0000\u0000\u039d\u039e\u0005=\u0000\u0000"+ + 
"\u039e\u00a1\u0001\u0000\u0000\u0000\u039f\u03a0\u0005+\u0000\u0000\u03a0"+ + "\u00a3\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005-\u0000\u0000\u03a2\u00a5"+ + "\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005*\u0000\u0000\u03a4\u00a7\u0001"+ + "\u0000\u0000\u0000\u03a5\u03a6\u0005/\u0000\u0000\u03a6\u00a9\u0001\u0000"+ + "\u0000\u0000\u03a7\u03a8\u0005%\u0000\u0000\u03a8\u00ab\u0001\u0000\u0000"+ + "\u0000\u03a9\u03aa\u0004N\b\u0000\u03aa\u03ab\u0003H\u001c\u0000\u03ab"+ + "\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ad\u0006N\r\u0000\u03ad\u00ad"+ + "\u0001\u0000\u0000\u0000\u03ae\u03af\u0003.\u000f\u0000\u03af\u03b0\u0001"+ + "\u0000\u0000\u0000\u03b0\u03b1\u0006O\u000e\u0000\u03b1\u00af\u0001\u0000"+ + "\u0000\u0000\u03b2\u03b5\u0003\u008c>\u0000\u03b3\u03b6\u0003N\u001f\u0000"+ + "\u03b4\u03b6\u0003\\&\u0000\u03b5\u03b3\u0001\u0000\u0000\u0000\u03b5"+ + "\u03b4\u0001\u0000\u0000\u0000\u03b6\u03ba\u0001\u0000\u0000\u0000\u03b7"+ + "\u03b9\u0003^\'\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03bc"+ + "\u0001\u0000\u0000\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb"+ + "\u0001\u0000\u0000\u0000\u03bb\u03c4\u0001\u0000\u0000\u0000\u03bc\u03ba"+ + "\u0001\u0000\u0000\u0000\u03bd\u03bf\u0003\u008c>\u0000\u03be\u03c0\u0003"+ + "L\u001e\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c1\u0001\u0000"+ + "\u0000\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000"+ + "\u0000\u0000\u03c2\u03c4\u0001\u0000\u0000\u0000\u03c3\u03b2\u0001\u0000"+ + "\u0000\u0000\u03c3\u03bd\u0001\u0000\u0000\u0000\u03c4\u00b1\u0001\u0000"+ + "\u0000\u0000\u03c5\u03c6\u0005[\u0000\u0000\u03c6\u03c7\u0001\u0000\u0000"+ + "\u0000\u03c7\u03c8\u0006Q\u0000\u0000\u03c8\u03c9\u0006Q\u0000\u0000\u03c9"+ + "\u00b3\u0001\u0000\u0000\u0000\u03ca\u03cb\u0005]\u0000\u0000\u03cb\u03cc"+ + "\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006R\f\u0000\u03cd\u03ce\u0006"+ + "R\f\u0000\u03ce\u00b5\u0001\u0000\u0000\u0000\u03cf\u03d3\u0003N\u001f"+ + "\u0000\u03d0\u03d2\u0003^\'\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000"+ + "\u03d2\u03d5\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000"+ + "\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03e0\u0001\u0000\u0000\u0000"+ + "\u03d5\u03d3\u0001\u0000\u0000\u0000\u03d6\u03d9\u0003\\&\u0000\u03d7"+ + "\u03d9\u0003V#\u0000\u03d8\u03d6\u0001\u0000\u0000\u0000\u03d8\u03d7\u0001"+ + "\u0000\u0000\u0000\u03d9\u03db\u0001\u0000\u0000\u0000\u03da\u03dc\u0003"+ + "^\'\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd\u0001\u0000"+ + "\u0000\u0000\u03dd\u03db\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000"+ + "\u0000\u0000\u03de\u03e0\u0001\u0000\u0000\u0000\u03df\u03cf\u0001\u0000"+ + "\u0000\u0000\u03df\u03d8\u0001\u0000\u0000\u0000\u03e0\u00b7\u0001\u0000"+ + "\u0000\u0000\u03e1\u03e3\u0003X$\u0000\u03e2\u03e4\u0003Z%\u0000\u03e3"+ + "\u03e2\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5"+ + "\u03e3\u0001\u0000\u0000\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0003X$\u0000\u03e8\u00b9\u0001"+ + "\u0000\u0000\u0000\u03e9\u03ea\u0003\u00b8T\u0000\u03ea\u00bb\u0001\u0000"+ + "\u0000\u0000\u03eb\u03ec\u0003B\u0019\u0000\u03ec\u03ed\u0001\u0000\u0000"+ + "\u0000\u03ed\u03ee\u0006V\u000b\u0000\u03ee\u00bd\u0001\u0000\u0000\u0000"+ + "\u03ef\u03f0\u0003D\u001a\u0000\u03f0\u03f1\u0001\u0000\u0000\u0000\u03f1"+ + "\u03f2\u0006W\u000b\u0000\u03f2\u00bf\u0001\u0000\u0000\u0000\u03f3\u03f4"+ + "\u0003F\u001b\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5\u03f6\u0006"+ + 
"X\u000b\u0000\u03f6\u00c1\u0001\u0000\u0000\u0000\u03f7\u03f8\u0003\u00b2"+ + "Q\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa\u0006Y\u000f\u0000"+ + "\u03fa\u03fb\u0006Y\u0010\u0000\u03fb\u00c3\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0003J\u001d\u0000\u03fd\u03fe\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0006Z\u0011\u0000\u03ff\u0400\u0006Z\f\u0000\u0400\u00c5\u0001\u0000"+ + "\u0000\u0000\u0401\u0402\u0003F\u001b\u0000\u0402\u0403\u0001\u0000\u0000"+ + "\u0000\u0403\u0404\u0006[\u000b\u0000\u0404\u00c7\u0001\u0000\u0000\u0000"+ + "\u0405\u0406\u0003B\u0019\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ + "\u0408\u0006\\\u000b\u0000\u0408\u00c9\u0001\u0000\u0000\u0000\u0409\u040a"+ + "\u0003D\u001a\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ + "]\u000b\u0000\u040c\u00cb\u0001\u0000\u0000\u0000\u040d\u040e\u0003J\u001d"+ + "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006^\u0011\u0000"+ + "\u0410\u0411\u0006^\f\u0000\u0411\u00cd\u0001\u0000\u0000\u0000\u0412"+ + "\u0413\u0003\u00b2Q\u0000\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415"+ + "\u0006_\u000f\u0000\u0415\u00cf\u0001\u0000\u0000\u0000\u0416\u0417\u0003"+ + "\u00b4R\u0000\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006`\u0012"+ + "\u0000\u0419\u00d1\u0001\u0000\u0000\u0000\u041a\u041b\u0003H\u001c\u0000"+ + "\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d\u0006a\r\u0000\u041d"+ + "\u00d3\u0001\u0000\u0000\u0000\u041e\u041f\u0003p0\u0000\u041f\u0420\u0001"+ + "\u0000\u0000\u0000\u0420\u0421\u0006b\u0013\u0000\u0421\u00d5\u0001\u0000"+ + "\u0000\u0000\u0422\u0423\u0003l.\u0000\u0423\u0424\u0001\u0000\u0000\u0000"+ + "\u0424\u0425\u0006c\u0014\u0000\u0425\u00d7\u0001\u0000\u0000\u0000\u0426"+ + "\u0427\u0007\u0010\u0000\u0000\u0427\u0428\u0007\u0003\u0000\u0000\u0428"+ + "\u0429\u0007\u0005\u0000\u0000\u0429\u042a\u0007\f\u0000\u0000\u042a\u042b"+ + "\u0007\u0000\u0000\u0000\u042b\u042c\u0007\f\u0000\u0000\u042c\u042d\u0007"+ + "\u0005\u0000\u0000\u042d\u042e\u0007\f\u0000\u0000\u042e\u00d9\u0001\u0000"+ + "\u0000\u0000\u042f\u0433\b!\u0000\u0000\u0430\u0431\u0005/\u0000\u0000"+ + "\u0431\u0433\b\"\u0000\u0000\u0432\u042f\u0001\u0000\u0000\u0000\u0432"+ + "\u0430\u0001\u0000\u0000\u0000\u0433\u00db\u0001\u0000\u0000\u0000\u0434"+ + "\u0436\u0003\u00dae\u0000\u0435\u0434\u0001\u0000\u0000\u0000\u0436\u0437"+ + "\u0001\u0000\u0000\u0000\u0437\u0435\u0001\u0000\u0000\u0000\u0437\u0438"+ + "\u0001\u0000\u0000\u0000\u0438\u00dd\u0001\u0000\u0000\u0000\u0439\u043a"+ + "\u0003\u00dcf\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006"+ + "g\u0015\u0000\u043c\u00df\u0001\u0000\u0000\u0000\u043d\u043e\u0003`("+ + "\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006h\u0016\u0000"+ + "\u0440\u00e1\u0001\u0000\u0000\u0000\u0441\u0442\u0003B\u0019\u0000\u0442"+ + "\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006i\u000b\u0000\u0444\u00e3"+ + "\u0001\u0000\u0000\u0000\u0445\u0446\u0003D\u001a\u0000\u0446\u0447\u0001"+ + "\u0000\u0000\u0000\u0447\u0448\u0006j\u000b\u0000\u0448\u00e5\u0001\u0000"+ + "\u0000\u0000\u0449\u044a\u0003F\u001b\u0000\u044a\u044b\u0001\u0000\u0000"+ + "\u0000\u044b\u044c\u0006k\u000b\u0000\u044c\u00e7\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0003J\u001d\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f"+ + "\u0450\u0006l\u0011\u0000\u0450\u0451\u0006l\f\u0000\u0451\u00e9\u0001"+ + "\u0000\u0000\u0000\u0452\u0453\u0003t2\u0000\u0453\u0454\u0001\u0000\u0000"+ + "\u0000\u0454\u0455\u0006m\u0017\u0000\u0455\u00eb\u0001\u0000\u0000\u0000"+ + 
"\u0456\u0457\u0003p0\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459"+ + "\u0006n\u0013\u0000\u0459\u00ed\u0001\u0000\u0000\u0000\u045a\u045b\u0004"+ + "o\t\u0000\u045b\u045c\u0003\u008c>\u0000\u045c\u045d\u0001\u0000\u0000"+ + "\u0000\u045d\u045e\u0006o\u0018\u0000\u045e\u00ef\u0001\u0000\u0000\u0000"+ + "\u045f\u0460\u0004p\n\u0000\u0460\u0461\u0003\u00b0P\u0000\u0461\u0462"+ + "\u0001\u0000\u0000\u0000\u0462\u0463\u0006p\u0019\u0000\u0463\u00f1\u0001"+ + "\u0000\u0000\u0000\u0464\u0469\u0003N\u001f\u0000\u0465\u0469\u0003L\u001e"+ + "\u0000\u0466\u0469\u0003\\&\u0000\u0467\u0469\u0003\u00a6K\u0000\u0468"+ + "\u0464\u0001\u0000\u0000\u0000\u0468\u0465\u0001\u0000\u0000\u0000\u0468"+ + "\u0466\u0001\u0000\u0000\u0000\u0468\u0467\u0001\u0000\u0000\u0000\u0469"+ + "\u00f3\u0001\u0000\u0000\u0000\u046a\u046d\u0003N\u001f\u0000\u046b\u046d"+ + "\u0003\u00a6K\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046c\u046b\u0001"+ + "\u0000\u0000\u0000\u046d\u0471\u0001\u0000\u0000\u0000\u046e\u0470\u0003"+ + "\u00f2q\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0473\u0001\u0000"+ + "\u0000\u0000\u0471\u046f\u0001\u0000\u0000\u0000\u0471\u0472\u0001\u0000"+ + "\u0000\u0000\u0472\u047e\u0001\u0000\u0000\u0000\u0473\u0471\u0001\u0000"+ + "\u0000\u0000\u0474\u0477\u0003\\&\u0000\u0475\u0477\u0003V#\u0000\u0476"+ + "\u0474\u0001\u0000\u0000\u0000\u0476\u0475\u0001\u0000\u0000\u0000\u0477"+ + "\u0479\u0001\u0000\u0000\u0000\u0478\u047a\u0003\u00f2q\u0000\u0479\u0478"+ + "\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000\u047b\u0479"+ + "\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047e"+ + "\u0001\u0000\u0000\u0000\u047d\u046c\u0001\u0000\u0000\u0000\u047d\u0476"+ + "\u0001\u0000\u0000\u0000\u047e\u00f5\u0001\u0000\u0000\u0000\u047f\u0482"+ + "\u0003\u00f4r\u0000\u0480\u0482\u0003\u00b8T\u0000\u0481\u047f\u0001\u0000"+ + "\u0000\u0000\u0481\u0480\u0001\u0000\u0000\u0000\u0482\u0483\u0001\u0000"+ + "\u0000\u0000\u0483\u0481\u0001\u0000\u0000\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u00f7\u0001\u0000\u0000\u0000\u0485\u0486\u0003B\u0019"+ + "\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006t\u000b\u0000"+ + "\u0488\u00f9\u0001\u0000\u0000\u0000\u0489\u048a\u0003D\u001a\u0000\u048a"+ + "\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006u\u000b\u0000\u048c\u00fb"+ + "\u0001\u0000\u0000\u0000\u048d\u048e\u0003F\u001b\u0000\u048e\u048f\u0001"+ + "\u0000\u0000\u0000\u048f\u0490\u0006v\u000b\u0000\u0490\u00fd\u0001\u0000"+ + "\u0000\u0000\u0491\u0492\u0003J\u001d\u0000\u0492\u0493\u0001\u0000\u0000"+ + "\u0000\u0493\u0494\u0006w\u0011\u0000\u0494\u0495\u0006w\f\u0000\u0495"+ + "\u00ff\u0001\u0000\u0000\u0000\u0496\u0497\u0003l.\u0000\u0497\u0498\u0001"+ + "\u0000\u0000\u0000\u0498\u0499\u0006x\u0014\u0000\u0499\u0101\u0001\u0000"+ + "\u0000\u0000\u049a\u049b\u0003p0\u0000\u049b\u049c\u0001\u0000\u0000\u0000"+ + "\u049c\u049d\u0006y\u0013\u0000\u049d\u0103\u0001\u0000\u0000\u0000\u049e"+ + "\u049f\u0003t2\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006"+ + "z\u0017\u0000\u04a1\u0105\u0001\u0000\u0000\u0000\u04a2\u04a3\u0004{\u000b"+ + "\u0000\u04a3\u04a4\u0003\u008c>\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0006{\u0018\u0000\u04a6\u0107\u0001\u0000\u0000\u0000\u04a7"+ + "\u04a8\u0004|\f\u0000\u04a8\u04a9\u0003\u00b0P\u0000\u04a9\u04aa\u0001"+ + "\u0000\u0000\u0000\u04aa\u04ab\u0006|\u0019\u0000\u04ab\u0109\u0001\u0000"+ + "\u0000\u0000\u04ac\u04ad\u0007\f\u0000\u0000\u04ad\u04ae\u0007\u0002\u0000"+ + 
"\u0000\u04ae\u010b\u0001\u0000\u0000\u0000\u04af\u04b0\u0003\u00f6s\u0000"+ + "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006~\u001a\u0000\u04b2"+ + "\u010d\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003B\u0019\u0000\u04b4\u04b5"+ + "\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u007f\u000b\u0000\u04b6\u010f"+ + "\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003D\u001a\u0000\u04b8\u04b9\u0001"+ + "\u0000\u0000\u0000\u04b9\u04ba\u0006\u0080\u000b\u0000\u04ba\u0111\u0001"+ + "\u0000\u0000\u0000\u04bb\u04bc\u0003F\u001b\u0000\u04bc\u04bd\u0001\u0000"+ + "\u0000\u0000\u04bd\u04be\u0006\u0081\u000b\u0000\u04be\u0113\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0003J\u001d\u0000\u04c0\u04c1\u0001\u0000\u0000"+ + "\u0000\u04c1\u04c2\u0006\u0082\u0011\u0000\u04c2\u04c3\u0006\u0082\f\u0000"+ + "\u04c3\u0115\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003\u00b2Q\u0000\u04c5"+ + "\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u0083\u000f\u0000\u04c7"+ + "\u04c8\u0006\u0083\u001b\u0000\u04c8\u0117\u0001\u0000\u0000\u0000\u04c9"+ + "\u04ca\u0007\u0007\u0000\u0000\u04ca\u04cb\u0007\t\u0000\u0000\u04cb\u04cc"+ + "\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000\u04cd\u0119"+ + "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0007\u0013\u0000\u0000\u04cf\u04d0"+ + "\u0007\u0001\u0000\u0000\u04d0\u04d1\u0007\u0005\u0000\u0000\u04d1\u04d2"+ + "\u0007\n\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ + "\u0085\u001c\u0000\u04d4\u011b\u0001\u0000\u0000\u0000\u04d5\u04d6\b#"+ + "\u0000\u0000\u04d6\u011d\u0001\u0000\u0000\u0000\u04d7\u04d9\u0003\u011c"+ + "\u0086\u0000\u04d8\u04d7\u0001\u0000\u0000\u0000\u04d9\u04da\u0001\u0000"+ + "\u0000\u0000\u04da\u04d8\u0001\u0000\u0000\u0000\u04da\u04db\u0001\u0000"+ + "\u0000\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0003H\u001c"+ + "\u0000\u04dd\u04df\u0001\u0000\u0000\u0000\u04de\u04d8\u0001\u0000\u0000"+ + "\u0000\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04e1\u0001\u0000\u0000"+ + "\u0000\u04e0\u04e2\u0003\u011c\u0086\u0000\u04e1\u04e0\u0001\u0000\u0000"+ + "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e1\u0001\u0000\u0000"+ + "\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u011f\u0001\u0000\u0000"+ + "\u0000\u04e5\u04e6\u0003\u011e\u0087\u0000\u04e6\u04e7\u0001\u0000\u0000"+ + "\u0000\u04e7\u04e8\u0006\u0088\u001d\u0000\u04e8\u0121\u0001\u0000\u0000"+ + "\u0000\u04e9\u04ea\u0003B\u0019\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ + "\u04eb\u04ec\u0006\u0089\u000b\u0000\u04ec\u0123\u0001\u0000\u0000\u0000"+ + "\u04ed\u04ee\u0003D\u001a\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ + "\u04f0\u0006\u008a\u000b\u0000\u04f0\u0125\u0001\u0000\u0000\u0000\u04f1"+ + "\u04f2\u0003F\u001b\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4"+ + "\u0006\u008b\u000b\u0000\u04f4\u0127\u0001\u0000\u0000\u0000\u04f5\u04f6"+ + "\u0003J\u001d\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006"+ + "\u008c\u0011\u0000\u04f8\u04f9\u0006\u008c\f\u0000\u04f9\u04fa\u0006\u008c"+ + "\f\u0000\u04fa\u0129\u0001\u0000\u0000\u0000\u04fb\u04fc\u0003l.\u0000"+ + "\u04fc\u04fd\u0001\u0000\u0000\u0000\u04fd\u04fe\u0006\u008d\u0014\u0000"+ + "\u04fe\u012b\u0001\u0000\u0000\u0000\u04ff\u0500\u0003p0\u0000\u0500\u0501"+ + "\u0001\u0000\u0000\u0000\u0501\u0502\u0006\u008e\u0013\u0000\u0502\u012d"+ + "\u0001\u0000\u0000\u0000\u0503\u0504\u0003t2\u0000\u0504\u0505\u0001\u0000"+ + "\u0000\u0000\u0505\u0506\u0006\u008f\u0017\u0000\u0506\u012f\u0001\u0000"+ + "\u0000\u0000\u0507\u0508\u0003\u011a\u0085\u0000\u0508\u0509\u0001\u0000"+ + 
"\u0000\u0000\u0509\u050a\u0006\u0090\u001e\u0000\u050a\u0131\u0001\u0000"+ + "\u0000\u0000\u050b\u050c\u0003\u00f6s\u0000\u050c\u050d\u0001\u0000\u0000"+ + "\u0000\u050d\u050e\u0006\u0091\u001a\u0000\u050e\u0133\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0003\u00baU\u0000\u0510\u0511\u0001\u0000\u0000\u0000"+ + "\u0511\u0512\u0006\u0092\u001f\u0000\u0512\u0135\u0001\u0000\u0000\u0000"+ + "\u0513\u0514\u0004\u0093\r\u0000\u0514\u0515\u0003\u008c>\u0000\u0515"+ + "\u0516\u0001\u0000\u0000\u0000\u0516\u0517\u0006\u0093\u0018\u0000\u0517"+ + "\u0137\u0001\u0000\u0000\u0000\u0518\u0519\u0004\u0094\u000e\u0000\u0519"+ + "\u051a\u0003\u00b0P\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c"+ + "\u0006\u0094\u0019\u0000\u051c\u0139\u0001\u0000\u0000\u0000\u051d\u051e"+ + "\u0003B\u0019\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006"+ + "\u0095\u000b\u0000\u0520\u013b\u0001\u0000\u0000\u0000\u0521\u0522\u0003"+ + "D\u001a\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u0096"+ + "\u000b\u0000\u0524\u013d\u0001\u0000\u0000\u0000\u0525\u0526\u0003F\u001b"+ + "\u0000\u0526\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u0097\u000b"+ + "\u0000\u0528\u013f\u0001\u0000\u0000\u0000\u0529\u052a\u0003J\u001d\u0000"+ + "\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u052c\u0006\u0098\u0011\u0000"+ + "\u052c\u052d\u0006\u0098\f\u0000\u052d\u0141\u0001\u0000\u0000\u0000\u052e"+ + "\u052f\u0003t2\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006"+ + "\u0099\u0017\u0000\u0531\u0143\u0001\u0000\u0000\u0000\u0532\u0533\u0004"+ + "\u009a\u000f\u0000\u0533\u0534\u0003\u008c>\u0000\u0534\u0535\u0001\u0000"+ + "\u0000\u0000\u0535\u0536\u0006\u009a\u0018\u0000\u0536\u0145\u0001\u0000"+ + "\u0000\u0000\u0537\u0538\u0004\u009b\u0010\u0000\u0538\u0539\u0003\u00b0"+ + "P\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u009b\u0019"+ + "\u0000\u053b\u0147\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00baU\u0000"+ + "\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u009c\u001f\u0000"+ + "\u053f\u0149\u0001\u0000\u0000\u0000\u0540\u0541\u0003\u00b6S\u0000\u0541"+ + "\u0542\u0001\u0000\u0000\u0000\u0542\u0543\u0006\u009d \u0000\u0543\u014b"+ + "\u0001\u0000\u0000\u0000\u0544\u0545\u0003B\u0019\u0000\u0545\u0546\u0001"+ + "\u0000\u0000\u0000\u0546\u0547\u0006\u009e\u000b\u0000\u0547\u014d\u0001"+ + "\u0000\u0000\u0000\u0548\u0549\u0003D\u001a\u0000\u0549\u054a\u0001\u0000"+ + "\u0000\u0000\u054a\u054b\u0006\u009f\u000b\u0000\u054b\u014f\u0001\u0000"+ + "\u0000\u0000\u054c\u054d\u0003F\u001b\u0000\u054d\u054e\u0001\u0000\u0000"+ + "\u0000\u054e\u054f\u0006\u00a0\u000b\u0000\u054f\u0151\u0001\u0000\u0000"+ + "\u0000\u0550\u0551\u0003J\u001d\u0000\u0551\u0552\u0001\u0000\u0000\u0000"+ + "\u0552\u0553\u0006\u00a1\u0011\u0000\u0553\u0554\u0006\u00a1\f\u0000\u0554"+ + "\u0153\u0001\u0000\u0000\u0000\u0555\u0556\u0007\u0001\u0000\u0000\u0556"+ + "\u0557\u0007\t\u0000\u0000\u0557\u0558\u0007\u000f\u0000\u0000\u0558\u0559"+ + "\u0007\u0007\u0000\u0000\u0559\u0155\u0001\u0000\u0000\u0000\u055a\u055b"+ + "\u0003B\u0019\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006"+ + "\u00a3\u000b\u0000\u055d\u0157\u0001\u0000\u0000\u0000\u055e\u055f\u0003"+ + "D\u001a\u0000\u055f\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00a4"+ + "\u000b\u0000\u0561\u0159\u0001\u0000\u0000\u0000\u0562\u0563\u0003F\u001b"+ + "\u0000\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00a5\u000b"+ + "\u0000\u0565\u015b\u0001\u0000\u0000\u0000\u0566\u0567\u0003\u00b4R\u0000"+ + 
"\u0567\u0568\u0001\u0000\u0000\u0000\u0568\u0569\u0006\u00a6\u0012\u0000"+ + "\u0569\u056a\u0006\u00a6\f\u0000\u056a\u015d\u0001\u0000\u0000\u0000\u056b"+ + "\u056c\u0003H\u001c\u0000\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e"+ + "\u0006\u00a7\r\u0000\u056e\u015f\u0001\u0000\u0000\u0000\u056f\u0575\u0003"+ + "V#\u0000\u0570\u0575\u0003L\u001e\u0000\u0571\u0575\u0003t2\u0000\u0572"+ + "\u0575\u0003N\u001f\u0000\u0573\u0575\u0003\\&\u0000\u0574\u056f\u0001"+ + "\u0000\u0000\u0000\u0574\u0570\u0001\u0000\u0000\u0000\u0574\u0571\u0001"+ + "\u0000\u0000\u0000\u0574\u0572\u0001\u0000\u0000\u0000\u0574\u0573\u0001"+ + "\u0000\u0000\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0574\u0001"+ + "\u0000\u0000\u0000\u0576\u0577\u0001\u0000\u0000\u0000\u0577\u0161\u0001"+ + "\u0000\u0000\u0000\u0578\u0579\u0003B\u0019\u0000\u0579\u057a\u0001\u0000"+ + "\u0000\u0000\u057a\u057b\u0006\u00a9\u000b\u0000\u057b\u0163\u0001\u0000"+ + "\u0000\u0000\u057c\u057d\u0003D\u001a\u0000\u057d\u057e\u0001\u0000\u0000"+ + "\u0000\u057e\u057f\u0006\u00aa\u000b\u0000\u057f\u0165\u0001\u0000\u0000"+ + "\u0000\u0580\u0581\u0003F\u001b\u0000\u0581\u0582\u0001\u0000\u0000\u0000"+ + "\u0582\u0583\u0006\u00ab\u000b\u0000\u0583\u0167\u0001\u0000\u0000\u0000"+ + "\u0584\u0585\u0003J\u001d\u0000\u0585\u0586\u0001\u0000\u0000\u0000\u0586"+ + "\u0587\u0006\u00ac\u0011\u0000\u0587\u0588\u0006\u00ac\f\u0000\u0588\u0169"+ + "\u0001\u0000\u0000\u0000\u0589\u058a\u0003H\u001c\u0000\u058a\u058b\u0001"+ + "\u0000\u0000\u0000\u058b\u058c\u0006\u00ad\r\u0000\u058c\u016b\u0001\u0000"+ + "\u0000\u0000\u058d\u058e\u0003p0\u0000\u058e\u058f\u0001\u0000\u0000\u0000"+ + "\u058f\u0590\u0006\u00ae\u0013\u0000\u0590\u016d\u0001\u0000\u0000\u0000"+ + "\u0591\u0592\u0003t2\u0000\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594"+ + "\u0006\u00af\u0017\u0000\u0594\u016f\u0001\u0000\u0000\u0000\u0595\u0596"+ + "\u0003\u0118\u0084\u0000\u0596\u0597\u0001\u0000\u0000\u0000\u0597\u0598"+ + "\u0006\u00b0!\u0000\u0598\u0599\u0006\u00b0\"\u0000\u0599\u0171\u0001"+ + "\u0000\u0000\u0000\u059a\u059b\u0003\u00dcf\u0000\u059b\u059c\u0001\u0000"+ + "\u0000\u0000\u059c\u059d\u0006\u00b1\u0015\u0000\u059d\u0173\u0001\u0000"+ + "\u0000\u0000\u059e\u059f\u0003`(\u0000\u059f\u05a0\u0001\u0000\u0000\u0000"+ + "\u05a0\u05a1\u0006\u00b2\u0016\u0000\u05a1\u0175\u0001\u0000\u0000\u0000"+ + "\u05a2\u05a3\u0003B\u0019\u0000\u05a3\u05a4\u0001\u0000\u0000\u0000\u05a4"+ + "\u05a5\u0006\u00b3\u000b\u0000\u05a5\u0177\u0001\u0000\u0000\u0000\u05a6"+ + "\u05a7\u0003D\u001a\u0000\u05a7\u05a8\u0001\u0000\u0000\u0000\u05a8\u05a9"+ + "\u0006\u00b4\u000b\u0000\u05a9\u0179\u0001\u0000\u0000\u0000\u05aa\u05ab"+ + "\u0003F\u001b\u0000\u05ab\u05ac\u0001\u0000\u0000\u0000\u05ac\u05ad\u0006"+ + "\u00b5\u000b\u0000\u05ad\u017b\u0001\u0000\u0000\u0000\u05ae\u05af\u0003"+ + "J\u001d\u0000\u05af\u05b0\u0001\u0000\u0000\u0000\u05b0\u05b1\u0006\u00b6"+ + "\u0011\u0000\u05b1\u05b2\u0006\u00b6\f\u0000\u05b2\u05b3\u0006\u00b6\f"+ + "\u0000\u05b3\u017d\u0001\u0000\u0000\u0000\u05b4\u05b5\u0003p0\u0000\u05b5"+ + "\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00b7\u0013\u0000\u05b7"+ + "\u017f\u0001\u0000\u0000\u0000\u05b8\u05b9\u0003t2\u0000\u05b9\u05ba\u0001"+ + "\u0000\u0000\u0000\u05ba\u05bb\u0006\u00b8\u0017\u0000\u05bb\u0181\u0001"+ + "\u0000\u0000\u0000\u05bc\u05bd\u0003\u00f6s\u0000\u05bd\u05be\u0001\u0000"+ + "\u0000\u0000\u05be\u05bf\u0006\u00b9\u001a\u0000\u05bf\u0183\u0001\u0000"+ + "\u0000\u0000\u05c0\u05c1\u0003B\u0019\u0000\u05c1\u05c2\u0001\u0000\u0000"+ + 
"\u0000\u05c2\u05c3\u0006\u00ba\u000b\u0000\u05c3\u0185\u0001\u0000\u0000"+ + "\u0000\u05c4\u05c5\u0003D\u001a\u0000\u05c5\u05c6\u0001\u0000\u0000\u0000"+ + "\u05c6\u05c7\u0006\u00bb\u000b\u0000\u05c7\u0187\u0001\u0000\u0000\u0000"+ + "\u05c8\u05c9\u0003F\u001b\u0000\u05c9\u05ca\u0001\u0000\u0000\u0000\u05ca"+ + "\u05cb\u0006\u00bc\u000b\u0000\u05cb\u0189\u0001\u0000\u0000\u0000\u05cc"+ + "\u05cd\u0003J\u001d\u0000\u05cd\u05ce\u0001\u0000\u0000\u0000\u05ce\u05cf"+ + "\u0006\u00bd\u0011\u0000\u05cf\u05d0\u0006\u00bd\f\u0000\u05d0\u018b\u0001"+ + "\u0000\u0000\u0000\u05d1\u05d2\u00036\u0013\u0000\u05d2\u05d3\u0001\u0000"+ + "\u0000\u0000\u05d3\u05d4\u0006\u00be#\u0000\u05d4\u018d\u0001\u0000\u0000"+ + "\u0000\u05d5\u05d6\u0003\u010a}\u0000\u05d6\u05d7\u0001\u0000\u0000\u0000"+ + "\u05d7\u05d8\u0006\u00bf$\u0000\u05d8\u018f\u0001\u0000\u0000\u0000\u05d9"+ + "\u05da\u0003\u0118\u0084\u0000\u05da\u05db\u0001\u0000\u0000\u0000\u05db"+ + "\u05dc\u0006\u00c0!\u0000\u05dc\u05dd\u0006\u00c0\f\u0000\u05dd\u05de"+ + "\u0006\u00c0\u0000\u0000\u05de\u0191\u0001\u0000\u0000\u0000\u05df\u05e0"+ + "\u0007\u0014\u0000\u0000\u05e0\u05e1\u0007\u0002\u0000\u0000\u05e1\u05e2"+ + "\u0007\u0001\u0000\u0000\u05e2\u05e3\u0007\t\u0000\u0000\u05e3\u05e4\u0007"+ + "\u0011\u0000\u0000\u05e4\u05e5\u0001\u0000\u0000\u0000\u05e5\u05e6\u0006"+ + "\u00c1\f\u0000\u05e6\u05e7\u0006\u00c1\u0000\u0000\u05e7\u0193\u0001\u0000"+ + "\u0000\u0000\u05e8\u05e9\u0003\u00b6S\u0000\u05e9\u05ea\u0001\u0000\u0000"+ + "\u0000\u05ea\u05eb\u0006\u00c2 \u0000\u05eb\u0195\u0001\u0000\u0000\u0000"+ + "\u05ec\u05ed\u0003\u00baU\u0000\u05ed\u05ee\u0001\u0000\u0000\u0000\u05ee"+ + "\u05ef\u0006\u00c3\u001f\u0000\u05ef\u0197\u0001\u0000\u0000\u0000\u05f0"+ + "\u05f1\u0003B\u0019\u0000\u05f1\u05f2\u0001\u0000\u0000\u0000\u05f2\u05f3"+ + "\u0006\u00c4\u000b\u0000\u05f3\u0199\u0001\u0000\u0000\u0000\u05f4\u05f5"+ + "\u0003D\u001a\u0000\u05f5\u05f6\u0001\u0000\u0000\u0000\u05f6\u05f7\u0006"+ + "\u00c5\u000b\u0000\u05f7\u019b\u0001\u0000\u0000\u0000\u05f8\u05f9\u0003"+ + "F\u001b\u0000\u05f9\u05fa\u0001\u0000\u0000\u0000\u05fa\u05fb\u0006\u00c6"+ + "\u000b\u0000\u05fb\u019d\u0001\u0000\u0000\u0000\u05fc\u05fd\u0003J\u001d"+ + "\u0000\u05fd\u05fe\u0001\u0000\u0000\u0000\u05fe\u05ff\u0006\u00c7\u0011"+ + "\u0000\u05ff\u0600\u0006\u00c7\f\u0000\u0600\u019f\u0001\u0000\u0000\u0000"+ + "\u0601\u0602\u0003\u00dcf\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603"+ + "\u0604\u0006\u00c8\u0015\u0000\u0604\u0605\u0006\u00c8\f\u0000\u0605\u0606"+ + "\u0006\u00c8%\u0000\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003"+ + "`(\u0000\u0608\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u0016"+ + "\u0000\u060a\u060b\u0006\u00c9\f\u0000\u060b\u060c\u0006\u00c9%\u0000"+ + "\u060c\u01a3\u0001\u0000\u0000\u0000\u060d\u060e\u0003B\u0019\u0000\u060e"+ + "\u060f\u0001\u0000\u0000\u0000\u060f\u0610\u0006\u00ca\u000b\u0000\u0610"+ + "\u01a5\u0001\u0000\u0000\u0000\u0611\u0612\u0003D\u001a\u0000\u0612\u0613"+ + "\u0001\u0000\u0000\u0000\u0613\u0614\u0006\u00cb\u000b\u0000\u0614\u01a7"+ + "\u0001\u0000\u0000\u0000\u0615\u0616\u0003F\u001b\u0000\u0616\u0617\u0001"+ + "\u0000\u0000\u0000\u0617\u0618\u0006\u00cc\u000b\u0000\u0618\u01a9\u0001"+ + "\u0000\u0000\u0000\u0619\u061a\u0003H\u001c\u0000\u061a\u061b\u0001\u0000"+ + "\u0000\u0000\u061b\u061c\u0006\u00cd\r\u0000\u061c\u061d\u0006\u00cd\f"+ + "\u0000\u061d\u061e\u0006\u00cd\t\u0000\u061e\u01ab\u0001\u0000\u0000\u0000"+ + "\u061f\u0620\u0003p0\u0000\u0620\u0621\u0001\u0000\u0000\u0000\u0621\u0622"+ + 
"\u0006\u00ce\u0013\u0000\u0622\u0623\u0006\u00ce\f\u0000\u0623\u0624\u0006"+ + "\u00ce\t\u0000\u0624\u01ad\u0001\u0000\u0000\u0000\u0625\u0626\u0003B"+ + "\u0019\u0000\u0626\u0627\u0001\u0000\u0000\u0000\u0627\u0628\u0006\u00cf"+ + "\u000b\u0000\u0628\u01af\u0001\u0000\u0000\u0000\u0629\u062a\u0003D\u001a"+ + "\u0000\u062a\u062b\u0001\u0000\u0000\u0000\u062b\u062c\u0006\u00d0\u000b"+ + "\u0000\u062c\u01b1\u0001\u0000\u0000\u0000\u062d\u062e\u0003F\u001b\u0000"+ + "\u062e\u062f\u0001\u0000\u0000\u0000\u062f\u0630\u0006\u00d1\u000b\u0000"+ + "\u0630\u01b3\u0001\u0000\u0000\u0000\u0631\u0632\u0003\u00baU\u0000\u0632"+ + "\u0633\u0001\u0000\u0000\u0000\u0633\u0634\u0006\u00d2\f\u0000\u0634\u0635"+ + "\u0006\u00d2\u0000\u0000\u0635\u0636\u0006\u00d2\u001f\u0000\u0636\u01b5"+ + "\u0001\u0000\u0000\u0000\u0637\u0638\u0003\u00b6S\u0000\u0638\u0639\u0001"+ + "\u0000\u0000\u0000\u0639\u063a\u0006\u00d3\f\u0000\u063a\u063b\u0006\u00d3"+ + "\u0000\u0000\u063b\u063c\u0006\u00d3 \u0000\u063c\u01b7\u0001\u0000\u0000"+ + "\u0000\u063d\u063e\u0003f+\u0000\u063e\u063f\u0001\u0000\u0000\u0000\u063f"+ + "\u0640\u0006\u00d4\f\u0000\u0640\u0641\u0006\u00d4\u0000\u0000\u0641\u0642"+ + "\u0006\u00d4&\u0000\u0642\u01b9\u0001\u0000\u0000\u0000\u0643\u0644\u0003"+ + "J\u001d\u0000\u0644\u0645\u0001\u0000\u0000\u0000\u0645\u0646\u0006\u00d5"+ + "\u0011\u0000\u0646\u0647\u0006\u00d5\f\u0000\u0647\u01bb\u0001\u0000\u0000"+ + "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u000e\u000f\u0291\u029b\u029f\u02a2\u02ab\u02ad\u02b8\u02cd\u02d2\u02db"+ + "\u02e2\u02e7\u02e9\u02f4\u02fc\u02ff\u0301\u0306\u030b\u0311\u0318\u031d"+ + "\u0323\u0326\u032e\u0332\u03b5\u03ba\u03c1\u03c3\u03d3\u03d8\u03dd\u03df"+ + "\u03e5\u0432\u0437\u0468\u046c\u0471\u0476\u047b\u047d\u0481\u0483\u04da"+ + "\u04de\u04e3\u0574\u0576\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + "\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ + "\u0001\u0000\u0004\u0000\u0000\u0007\u001d\u0000\u0007\u0010\u0000\u0007"+ + "F\u0000\u0005\u0000\u0000\u0007\u001e\u0000\u0007G\u0000\u0007\'\u0000"+ + "\u0007%\u0000\u0007Q\u0000\u0007\u001f\u0000\u0007)\u0000\u00075\u0000"+ + "\u0007E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000"+ + "\u0007^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007"+ + "\u0014\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007\"\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a2b339f378f12..71930451ad55c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -23,6 +23,11 @@ null null null null +null +null +null +null +null ':' '|' null @@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,6 +150,11 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS 
METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -305,7 +323,11 @@ enrichCommand enrichWithClause lookupCommand inlinestatsCommand +joinCommand +joinTarget +joinCondition +joinPredicate atn: -[4, 1, 119, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 298, 8, 15, 10, 15, 12, 15, 301, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 314, 8, 17, 10, 17, 12, 17, 317, 9, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 335, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 341, 8, 22, 10, 22, 12, 22, 344, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 354, 8, 24, 10, 24, 12, 24, 357, 9, 24, 1, 24, 3, 24, 360, 8, 24, 1, 24, 1, 24, 3, 24, 364, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 371, 8, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 380, 8, 27, 10, 27, 12, 27, 383, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 388, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 393, 8, 29, 10, 29, 12, 29, 396, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 409, 8, 31, 10, 31, 12, 31, 412, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 419, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 434, 8, 34, 10, 34, 12, 34, 437, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 445, 8, 34, 10, 34, 12, 34, 448, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 
34, 5, 34, 456, 8, 34, 10, 34, 12, 34, 459, 9, 34, 1, 34, 1, 34, 3, 34, 463, 8, 34, 1, 35, 1, 35, 3, 35, 467, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 628, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 287, 1, 0, 0, 0, 26, 289, 1, 0, 0, 0, 28, 291, 1, 0, 0, 0, 30, 294, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 309, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 328, 1, 0, 0, 0, 40, 330, 1, 0, 0, 0, 42, 334, 1, 0, 0, 0, 44, 336, 1, 0, 0, 0, 46, 345, 1, 0, 0, 0, 48, 349, 1, 0, 0, 0, 50, 365, 1, 0, 0, 0, 52, 368, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 384, 1, 0, 0, 0, 58, 389, 1, 0, 0, 0, 60, 397, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 413, 1, 0, 0, 0, 66, 418, 1, 0, 0, 0, 68, 462, 1, 0, 0, 0, 70, 466, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 25, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 
3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 44, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 44, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 39, 0, 0, 177, 178, 5, 43, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 34, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 50, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 40, 0, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 45, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 30, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 47, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 44, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 42, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 44, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 49, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 58, 29, 0, 227, 228, 5, 24, 0, 0, 228, 229, 3, 68, 34, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 43, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 50, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 33, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 
271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 43, 0, 0, 274, 284, 5, 61, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 34, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 50, 0, 0, 286, 23, 1, 0, 0, 0, 287, 288, 3, 72, 36, 0, 288, 25, 1, 0, 0, 0, 289, 290, 3, 64, 32, 0, 290, 27, 1, 0, 0, 0, 291, 292, 5, 12, 0, 0, 292, 293, 3, 30, 15, 0, 293, 29, 1, 0, 0, 0, 294, 299, 3, 32, 16, 0, 295, 296, 5, 34, 0, 0, 296, 298, 3, 32, 16, 0, 297, 295, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 31, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 302, 303, 3, 58, 29, 0, 303, 304, 5, 32, 0, 0, 304, 306, 1, 0, 0, 0, 305, 302, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 3, 10, 5, 0, 308, 33, 1, 0, 0, 0, 309, 310, 5, 6, 0, 0, 310, 315, 3, 36, 18, 0, 311, 312, 5, 34, 0, 0, 312, 314, 3, 36, 18, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 320, 3, 42, 21, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 3, 38, 19, 0, 322, 323, 5, 24, 0, 0, 323, 325, 1, 0, 0, 0, 324, 321, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 327, 3, 40, 20, 0, 327, 37, 1, 0, 0, 0, 328, 329, 5, 76, 0, 0, 329, 39, 1, 0, 0, 0, 330, 331, 7, 2, 0, 0, 331, 41, 1, 0, 0, 0, 332, 335, 3, 44, 22, 0, 333, 335, 3, 46, 23, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, 0, 335, 43, 1, 0, 0, 0, 336, 337, 5, 75, 0, 0, 337, 342, 5, 76, 0, 0, 338, 339, 5, 34, 0, 0, 339, 341, 5, 76, 0, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 45, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 65, 0, 0, 346, 347, 3, 44, 22, 0, 347, 348, 5, 66, 0, 0, 348, 47, 1, 0, 0, 0, 349, 350, 5, 19, 0, 0, 350, 355, 3, 36, 18, 0, 351, 352, 5, 34, 0, 0, 352, 354, 3, 36, 18, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 360, 3, 54, 27, 0, 359, 358, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 363, 1, 0, 0, 0, 361, 362, 5, 29, 0, 0, 362, 364, 3, 30, 15, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 49, 1, 0, 0, 0, 365, 366, 5, 4, 0, 0, 366, 367, 3, 30, 15, 0, 367, 51, 1, 0, 0, 0, 368, 370, 5, 15, 0, 0, 369, 371, 3, 54, 27, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 29, 0, 0, 373, 375, 3, 30, 15, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 381, 3, 56, 28, 0, 377, 378, 5, 34, 0, 0, 378, 380, 3, 56, 28, 0, 379, 377, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 55, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 384, 387, 3, 32, 16, 0, 385, 386, 5, 16, 0, 0, 386, 388, 3, 10, 5, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 394, 3, 72, 36, 0, 390, 391, 5, 36, 0, 0, 391, 393, 3, 72, 36, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 402, 3, 66, 33, 0, 398, 399, 5, 36, 0, 0, 399, 401, 3, 66, 33, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 410, 3, 60, 30, 0, 406, 407, 5, 34, 0, 0, 407, 409, 3, 60, 30, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 
408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 63, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 7, 3, 0, 0, 414, 65, 1, 0, 0, 0, 415, 419, 5, 80, 0, 0, 416, 417, 4, 33, 10, 0, 417, 419, 3, 70, 35, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 419, 67, 1, 0, 0, 0, 420, 463, 5, 45, 0, 0, 421, 422, 3, 104, 52, 0, 422, 423, 5, 67, 0, 0, 423, 463, 1, 0, 0, 0, 424, 463, 3, 102, 51, 0, 425, 463, 3, 104, 52, 0, 426, 463, 3, 98, 49, 0, 427, 463, 3, 70, 35, 0, 428, 463, 3, 106, 53, 0, 429, 430, 5, 65, 0, 0, 430, 435, 3, 100, 50, 0, 431, 432, 5, 34, 0, 0, 432, 434, 3, 100, 50, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 438, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 66, 0, 0, 439, 463, 1, 0, 0, 0, 440, 441, 5, 65, 0, 0, 441, 446, 3, 98, 49, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 98, 49, 0, 444, 442, 1, 0, 0, 0, 445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 449, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 5, 66, 0, 0, 450, 463, 1, 0, 0, 0, 451, 452, 5, 65, 0, 0, 452, 457, 3, 106, 53, 0, 453, 454, 5, 34, 0, 0, 454, 456, 3, 106, 53, 0, 455, 453, 1, 0, 0, 0, 456, 459, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 460, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 460, 461, 5, 66, 0, 0, 461, 463, 1, 0, 0, 0, 462, 420, 1, 0, 0, 0, 462, 421, 1, 0, 0, 0, 462, 424, 1, 0, 0, 0, 462, 425, 1, 0, 0, 0, 462, 426, 1, 0, 0, 0, 462, 427, 1, 0, 0, 0, 462, 428, 1, 0, 0, 0, 462, 429, 1, 0, 0, 0, 462, 440, 1, 0, 0, 0, 462, 451, 1, 0, 0, 0, 463, 69, 1, 0, 0, 0, 464, 467, 5, 48, 0, 0, 465, 467, 5, 64, 0, 0, 466, 464, 1, 0, 0, 0, 466, 465, 1, 0, 0, 0, 467, 71, 1, 0, 0, 0, 468, 472, 3, 64, 32, 0, 469, 470, 4, 36, 11, 0, 470, 472, 3, 70, 35, 0, 471, 468, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 475, 5, 27, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 34, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 46, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 34, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 34, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 32, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 
0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 28, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 27, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 34, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 32, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 29, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 58, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 299, 305, 315, 319, 324, 334, 342, 355, 359, 363, 370, 374, 381, 387, 394, 402, 410, 418, 435, 446, 457, 462, 466, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file +[4, 1, 128, 636, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 208, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 216, 8, 5, 10, 5, 12, 5, 219, 9, 5, 1, 6, 1, 6, 3, 6, 223, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 230, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 235, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 252, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 
260, 8, 9, 10, 9, 12, 9, 263, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 273, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 278, 8, 10, 10, 10, 12, 10, 281, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 289, 8, 11, 10, 11, 12, 11, 292, 9, 11, 3, 11, 294, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 308, 8, 15, 10, 15, 12, 15, 311, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 316, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 324, 8, 17, 10, 17, 12, 17, 327, 9, 17, 1, 17, 3, 17, 330, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 335, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 345, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 351, 8, 22, 10, 22, 12, 22, 354, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 364, 8, 24, 10, 24, 12, 24, 367, 9, 24, 1, 24, 3, 24, 370, 8, 24, 1, 24, 1, 24, 3, 24, 374, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 381, 8, 26, 1, 26, 1, 26, 3, 26, 385, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 390, 8, 27, 10, 27, 12, 27, 393, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 398, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 403, 8, 29, 10, 29, 12, 29, 406, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 411, 8, 30, 10, 30, 12, 30, 414, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 419, 8, 31, 10, 31, 12, 31, 422, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 429, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 444, 8, 34, 10, 34, 12, 34, 447, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 455, 8, 34, 10, 34, 12, 34, 458, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 466, 8, 34, 10, 34, 12, 34, 469, 9, 34, 1, 34, 1, 34, 3, 34, 473, 8, 34, 1, 35, 1, 35, 3, 35, 477, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 482, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 491, 8, 38, 10, 38, 12, 38, 494, 9, 38, 1, 39, 1, 39, 3, 39, 498, 8, 39, 1, 39, 1, 39, 3, 39, 502, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 514, 8, 42, 10, 42, 12, 42, 517, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 527, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 539, 8, 47, 10, 47, 12, 47, 542, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 552, 8, 50, 1, 51, 3, 51, 555, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 560, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 582, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 588, 8, 58, 10, 58, 12, 58, 591, 9, 58, 3, 58, 593, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 598, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 611, 8, 61, 1, 62, 3, 62, 614, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 623, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 629, 8, 64, 10, 64, 12, 64, 632, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 31, 31, 81, 81, 1, 0, 72, 73, 2, 0, 36, 36, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 661, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 
174, 1, 0, 0, 0, 10, 207, 1, 0, 0, 0, 12, 234, 1, 0, 0, 0, 14, 236, 1, 0, 0, 0, 16, 245, 1, 0, 0, 0, 18, 251, 1, 0, 0, 0, 20, 272, 1, 0, 0, 0, 22, 282, 1, 0, 0, 0, 24, 297, 1, 0, 0, 0, 26, 299, 1, 0, 0, 0, 28, 301, 1, 0, 0, 0, 30, 304, 1, 0, 0, 0, 32, 315, 1, 0, 0, 0, 34, 319, 1, 0, 0, 0, 36, 334, 1, 0, 0, 0, 38, 338, 1, 0, 0, 0, 40, 340, 1, 0, 0, 0, 42, 344, 1, 0, 0, 0, 44, 346, 1, 0, 0, 0, 46, 355, 1, 0, 0, 0, 48, 359, 1, 0, 0, 0, 50, 375, 1, 0, 0, 0, 52, 378, 1, 0, 0, 0, 54, 386, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 399, 1, 0, 0, 0, 60, 407, 1, 0, 0, 0, 62, 415, 1, 0, 0, 0, 64, 423, 1, 0, 0, 0, 66, 428, 1, 0, 0, 0, 68, 472, 1, 0, 0, 0, 70, 476, 1, 0, 0, 0, 72, 481, 1, 0, 0, 0, 74, 483, 1, 0, 0, 0, 76, 486, 1, 0, 0, 0, 78, 495, 1, 0, 0, 0, 80, 503, 1, 0, 0, 0, 82, 506, 1, 0, 0, 0, 84, 509, 1, 0, 0, 0, 86, 518, 1, 0, 0, 0, 88, 522, 1, 0, 0, 0, 90, 528, 1, 0, 0, 0, 92, 532, 1, 0, 0, 0, 94, 535, 1, 0, 0, 0, 96, 543, 1, 0, 0, 0, 98, 547, 1, 0, 0, 0, 100, 551, 1, 0, 0, 0, 102, 554, 1, 0, 0, 0, 104, 559, 1, 0, 0, 0, 106, 563, 1, 0, 0, 0, 108, 565, 1, 0, 0, 0, 110, 567, 1, 0, 0, 0, 112, 570, 1, 0, 0, 0, 114, 574, 1, 0, 0, 0, 116, 577, 1, 0, 0, 0, 118, 597, 1, 0, 0, 0, 120, 601, 1, 0, 0, 0, 122, 606, 1, 0, 0, 0, 124, 613, 1, 0, 0, 0, 126, 619, 1, 0, 0, 0, 128, 624, 1, 0, 0, 0, 130, 633, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 30, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 208, 3, 10, 5, 8, 180, 208, 3, 16, 8, 0, 181, 208, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 208, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 208, 1, 0, 0, 0, 205, 206, 4, 5, 5, 0, 206, 208, 3, 14, 7, 0, 207, 177, 1, 0, 0, 0, 207, 180, 1, 0, 0, 0, 207, 
181, 1, 0, 0, 0, 207, 182, 1, 0, 0, 0, 207, 198, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 208, 217, 1, 0, 0, 0, 209, 210, 10, 5, 0, 0, 210, 211, 5, 35, 0, 0, 211, 216, 3, 10, 5, 6, 212, 213, 10, 4, 0, 0, 213, 214, 5, 52, 0, 0, 214, 216, 3, 10, 5, 5, 215, 209, 1, 0, 0, 0, 215, 212, 1, 0, 0, 0, 216, 219, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 11, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 220, 222, 3, 16, 8, 0, 221, 223, 5, 49, 0, 0, 222, 221, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 225, 5, 47, 0, 0, 225, 226, 3, 106, 53, 0, 226, 235, 1, 0, 0, 0, 227, 229, 3, 16, 8, 0, 228, 230, 5, 49, 0, 0, 229, 228, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 5, 54, 0, 0, 232, 233, 3, 106, 53, 0, 233, 235, 1, 0, 0, 0, 234, 220, 1, 0, 0, 0, 234, 227, 1, 0, 0, 0, 235, 13, 1, 0, 0, 0, 236, 237, 3, 58, 29, 0, 237, 238, 5, 29, 0, 0, 238, 239, 3, 68, 34, 0, 239, 15, 1, 0, 0, 0, 240, 246, 3, 18, 9, 0, 241, 242, 3, 18, 9, 0, 242, 243, 3, 108, 54, 0, 243, 244, 3, 18, 9, 0, 244, 246, 1, 0, 0, 0, 245, 240, 1, 0, 0, 0, 245, 241, 1, 0, 0, 0, 246, 17, 1, 0, 0, 0, 247, 248, 6, 9, -1, 0, 248, 252, 3, 20, 10, 0, 249, 250, 7, 0, 0, 0, 250, 252, 3, 18, 9, 3, 251, 247, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 261, 1, 0, 0, 0, 253, 254, 10, 2, 0, 0, 254, 255, 7, 1, 0, 0, 255, 260, 3, 18, 9, 3, 256, 257, 10, 1, 0, 0, 257, 258, 7, 0, 0, 0, 258, 260, 3, 18, 9, 2, 259, 253, 1, 0, 0, 0, 259, 256, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 19, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 265, 6, 10, -1, 0, 265, 273, 3, 68, 34, 0, 266, 273, 3, 58, 29, 0, 267, 273, 3, 22, 11, 0, 268, 269, 5, 48, 0, 0, 269, 270, 3, 10, 5, 0, 270, 271, 5, 55, 0, 0, 271, 273, 1, 0, 0, 0, 272, 264, 1, 0, 0, 0, 272, 266, 1, 0, 0, 0, 272, 267, 1, 0, 0, 0, 272, 268, 1, 0, 0, 0, 273, 279, 1, 0, 0, 0, 274, 275, 10, 1, 0, 0, 275, 276, 5, 38, 0, 0, 276, 278, 3, 26, 13, 0, 277, 274, 1, 0, 0, 0, 278, 281, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 21, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 282, 283, 3, 24, 12, 0, 283, 293, 5, 48, 0, 0, 284, 294, 5, 66, 0, 0, 285, 290, 3, 10, 5, 0, 286, 287, 5, 39, 0, 0, 287, 289, 3, 10, 5, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 284, 1, 0, 0, 0, 293, 285, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 5, 55, 0, 0, 296, 23, 1, 0, 0, 0, 297, 298, 3, 72, 36, 0, 298, 25, 1, 0, 0, 0, 299, 300, 3, 64, 32, 0, 300, 27, 1, 0, 0, 0, 301, 302, 5, 12, 0, 0, 302, 303, 3, 30, 15, 0, 303, 29, 1, 0, 0, 0, 304, 309, 3, 32, 16, 0, 305, 306, 5, 39, 0, 0, 306, 308, 3, 32, 16, 0, 307, 305, 1, 0, 0, 0, 308, 311, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 31, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 312, 313, 3, 58, 29, 0, 313, 314, 5, 37, 0, 0, 314, 316, 1, 0, 0, 0, 315, 312, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, 3, 10, 5, 0, 318, 33, 1, 0, 0, 0, 319, 320, 5, 6, 0, 0, 320, 325, 3, 36, 18, 0, 321, 322, 5, 39, 0, 0, 322, 324, 3, 36, 18, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 330, 3, 42, 21, 0, 329, 328, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 35, 1, 0, 0, 0, 331, 332, 3, 38, 19, 0, 332, 333, 5, 29, 0, 0, 333, 335, 1, 0, 0, 0, 334, 331, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 337, 3, 40, 20, 0, 337, 37, 1, 0, 0, 0, 338, 339, 5, 81, 0, 0, 339, 39, 1, 0, 0, 0, 340, 341, 7, 2, 0, 0, 341, 41, 
1, 0, 0, 0, 342, 345, 3, 44, 22, 0, 343, 345, 3, 46, 23, 0, 344, 342, 1, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 43, 1, 0, 0, 0, 346, 347, 5, 80, 0, 0, 347, 352, 5, 81, 0, 0, 348, 349, 5, 39, 0, 0, 349, 351, 5, 81, 0, 0, 350, 348, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 45, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 355, 356, 5, 70, 0, 0, 356, 357, 3, 44, 22, 0, 357, 358, 5, 71, 0, 0, 358, 47, 1, 0, 0, 0, 359, 360, 5, 19, 0, 0, 360, 365, 3, 36, 18, 0, 361, 362, 5, 39, 0, 0, 362, 364, 3, 36, 18, 0, 363, 361, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 368, 370, 3, 54, 27, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 34, 0, 0, 372, 374, 3, 30, 15, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 49, 1, 0, 0, 0, 375, 376, 5, 4, 0, 0, 376, 377, 3, 30, 15, 0, 377, 51, 1, 0, 0, 0, 378, 380, 5, 15, 0, 0, 379, 381, 3, 54, 27, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 30, 15, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 53, 1, 0, 0, 0, 386, 391, 3, 56, 28, 0, 387, 388, 5, 39, 0, 0, 388, 390, 3, 56, 28, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 55, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 397, 3, 32, 16, 0, 395, 396, 5, 16, 0, 0, 396, 398, 3, 10, 5, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 57, 1, 0, 0, 0, 399, 404, 3, 72, 36, 0, 400, 401, 5, 41, 0, 0, 401, 403, 3, 72, 36, 0, 402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 59, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 66, 33, 0, 408, 409, 5, 41, 0, 0, 409, 411, 3, 66, 33, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 61, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 420, 3, 60, 30, 0, 416, 417, 5, 39, 0, 0, 417, 419, 3, 60, 30, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 63, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 7, 3, 0, 0, 424, 65, 1, 0, 0, 0, 425, 429, 5, 85, 0, 0, 426, 427, 4, 33, 11, 0, 427, 429, 3, 70, 35, 0, 428, 425, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 67, 1, 0, 0, 0, 430, 473, 5, 50, 0, 0, 431, 432, 3, 104, 52, 0, 432, 433, 5, 72, 0, 0, 433, 473, 1, 0, 0, 0, 434, 473, 3, 102, 51, 0, 435, 473, 3, 104, 52, 0, 436, 473, 3, 98, 49, 0, 437, 473, 3, 70, 35, 0, 438, 473, 3, 106, 53, 0, 439, 440, 5, 70, 0, 0, 440, 445, 3, 100, 50, 0, 441, 442, 5, 39, 0, 0, 442, 444, 3, 100, 50, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 448, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 5, 71, 0, 0, 449, 473, 1, 0, 0, 0, 450, 451, 5, 70, 0, 0, 451, 456, 3, 98, 49, 0, 452, 453, 5, 39, 0, 0, 453, 455, 3, 98, 49, 0, 454, 452, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 459, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 459, 460, 5, 71, 0, 0, 460, 473, 1, 0, 0, 0, 461, 462, 5, 70, 0, 0, 462, 467, 3, 106, 53, 0, 463, 464, 5, 39, 0, 0, 464, 466, 3, 106, 53, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 470, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 471, 5, 71, 0, 0, 471, 473, 1, 0, 0, 0, 472, 430, 1, 0, 0, 0, 472, 431, 1, 0, 0, 0, 472, 434, 1, 0, 0, 0, 472, 435, 1, 0, 0, 0, 472, 436, 1, 0, 0, 0, 472, 437, 1, 0, 0, 0, 472, 438, 1, 0, 0, 0, 472, 439, 1, 0, 0, 0, 472, 450, 1, 0, 0, 0, 472, 461, 1, 0, 0, 0, 473, 69, 1, 0, 0, 0, 
474, 477, 5, 53, 0, 0, 475, 477, 5, 69, 0, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 71, 1, 0, 0, 0, 478, 482, 3, 64, 32, 0, 479, 480, 4, 36, 12, 0, 480, 482, 3, 70, 35, 0, 481, 478, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 482, 73, 1, 0, 0, 0, 483, 484, 5, 9, 0, 0, 484, 485, 5, 32, 0, 0, 485, 75, 1, 0, 0, 0, 486, 487, 5, 14, 0, 0, 487, 492, 3, 78, 39, 0, 488, 489, 5, 39, 0, 0, 489, 491, 3, 78, 39, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 77, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 497, 3, 10, 5, 0, 496, 498, 7, 4, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 500, 5, 51, 0, 0, 500, 502, 7, 5, 0, 0, 501, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 79, 1, 0, 0, 0, 503, 504, 5, 8, 0, 0, 504, 505, 3, 62, 31, 0, 505, 81, 1, 0, 0, 0, 506, 507, 5, 2, 0, 0, 507, 508, 3, 62, 31, 0, 508, 83, 1, 0, 0, 0, 509, 510, 5, 11, 0, 0, 510, 515, 3, 86, 43, 0, 511, 512, 5, 39, 0, 0, 512, 514, 3, 86, 43, 0, 513, 511, 1, 0, 0, 0, 514, 517, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 85, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 518, 519, 3, 60, 30, 0, 519, 520, 5, 89, 0, 0, 520, 521, 3, 60, 30, 0, 521, 87, 1, 0, 0, 0, 522, 523, 5, 1, 0, 0, 523, 524, 3, 20, 10, 0, 524, 526, 3, 106, 53, 0, 525, 527, 3, 94, 47, 0, 526, 525, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 89, 1, 0, 0, 0, 528, 529, 5, 7, 0, 0, 529, 530, 3, 20, 10, 0, 530, 531, 3, 106, 53, 0, 531, 91, 1, 0, 0, 0, 532, 533, 5, 10, 0, 0, 533, 534, 3, 58, 29, 0, 534, 93, 1, 0, 0, 0, 535, 540, 3, 96, 48, 0, 536, 537, 5, 39, 0, 0, 537, 539, 3, 96, 48, 0, 538, 536, 1, 0, 0, 0, 539, 542, 1, 0, 0, 0, 540, 538, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 95, 1, 0, 0, 0, 542, 540, 1, 0, 0, 0, 543, 544, 3, 64, 32, 0, 544, 545, 5, 37, 0, 0, 545, 546, 3, 68, 34, 0, 546, 97, 1, 0, 0, 0, 547, 548, 7, 6, 0, 0, 548, 99, 1, 0, 0, 0, 549, 552, 3, 102, 51, 0, 550, 552, 3, 104, 52, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 101, 1, 0, 0, 0, 553, 555, 7, 0, 0, 0, 554, 553, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 5, 33, 0, 0, 557, 103, 1, 0, 0, 0, 558, 560, 7, 0, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 5, 32, 0, 0, 562, 105, 1, 0, 0, 0, 563, 564, 5, 31, 0, 0, 564, 107, 1, 0, 0, 0, 565, 566, 7, 7, 0, 0, 566, 109, 1, 0, 0, 0, 567, 568, 5, 5, 0, 0, 568, 569, 3, 112, 56, 0, 569, 111, 1, 0, 0, 0, 570, 571, 5, 70, 0, 0, 571, 572, 3, 2, 1, 0, 572, 573, 5, 71, 0, 0, 573, 113, 1, 0, 0, 0, 574, 575, 5, 13, 0, 0, 575, 576, 5, 105, 0, 0, 576, 115, 1, 0, 0, 0, 577, 578, 5, 3, 0, 0, 578, 581, 5, 95, 0, 0, 579, 580, 5, 93, 0, 0, 580, 582, 3, 60, 30, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 592, 1, 0, 0, 0, 583, 584, 5, 94, 0, 0, 584, 589, 3, 118, 59, 0, 585, 586, 5, 39, 0, 0, 586, 588, 3, 118, 59, 0, 587, 585, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 583, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 117, 1, 0, 0, 0, 594, 595, 3, 60, 30, 0, 595, 596, 5, 37, 0, 0, 596, 598, 1, 0, 0, 0, 597, 594, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 3, 60, 30, 0, 600, 119, 1, 0, 0, 0, 601, 602, 5, 18, 0, 0, 602, 603, 3, 36, 18, 0, 603, 604, 5, 93, 0, 0, 604, 605, 3, 62, 31, 0, 605, 121, 1, 0, 0, 0, 606, 607, 5, 17, 0, 0, 607, 610, 3, 54, 27, 0, 608, 609, 5, 34, 0, 0, 609, 611, 3, 30, 15, 0, 610, 608, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 123, 1, 0, 0, 0, 612, 614, 7, 8, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 1, 
0, 0, 0, 615, 616, 5, 20, 0, 0, 616, 617, 3, 126, 63, 0, 617, 618, 3, 128, 64, 0, 618, 125, 1, 0, 0, 0, 619, 622, 3, 64, 32, 0, 620, 621, 5, 89, 0, 0, 621, 623, 3, 64, 32, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 127, 1, 0, 0, 0, 624, 625, 5, 93, 0, 0, 625, 630, 3, 130, 65, 0, 626, 627, 5, 39, 0, 0, 627, 629, 3, 130, 65, 0, 628, 626, 1, 0, 0, 0, 629, 632, 1, 0, 0, 0, 630, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 129, 1, 0, 0, 0, 632, 630, 1, 0, 0, 0, 633, 634, 3, 16, 8, 0, 634, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 207, 215, 217, 222, 229, 234, 245, 251, 259, 261, 272, 279, 290, 293, 309, 315, 325, 329, 334, 344, 352, 365, 369, 373, 380, 384, 391, 397, 404, 412, 420, 428, 445, 456, 467, 472, 476, 481, 492, 497, 501, 515, 526, 540, 551, 554, 559, 581, 589, 592, 597, 610, 613, 622, 630] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e36184b1f07da..3bf6795c4e1dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,26 +8,14 @@ * 2.0. */ -import org.antlr.v4.runtime.FailedPredicateException; -import org.antlr.v4.runtime.NoViableAltException; -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ParserATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.tree.ParseTreeListener; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.TerminalNode; - +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"}) public class EsqlBaseParser extends ParserConfig { @@ -37,114 +25,121 @@ public class EsqlBaseParser extends ParserConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, - INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, - CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, - LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, - RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, - PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, 
EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, - SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, - LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, - LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, - METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, - CLOSING_METRICS_WS=119; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29, + PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34, + AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, + OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, + LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, + NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, + UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, + EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, + METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, + FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, + PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, + RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, + ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, + ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, + MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, + SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110, + SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, + LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, + LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, + JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, + METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, + CLOSING_METRICS_WS=128; public static final int - RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, - RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, - RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, - 
RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, - RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, - RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, - RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, - RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, - RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, - RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, - RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, - RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, - RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, - RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, - RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, - RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, - RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, - RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, - RULE_inlinestatsCommand = 61; + RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, + RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, + RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, + RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, + RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, + RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, + RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23, + RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, + RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, + RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, + RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, + RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, + RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, + RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, + RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, + RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, + RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, + RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, + RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, + RULE_joinCondition = 64, RULE_joinPredicate = 65; private static String[] makeRuleNames() { return new String[] { - "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", - "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", - "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", - 
"aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", - "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", - "inlinestatsCommand" + "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", + "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", + "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", + "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", + "indexPattern", "clusterString", "indexString", "metadata", "metadataOption", + "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", + "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", + "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", + "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" }; } public static final String[] ruleNames = makeRuleNames(); private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, null, null, null, null, "':'", "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", + "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, null, null, null, null, null, null, "'as'", null, null, null, + "'on'", "'with'", null, null, null, null, null, null, null, null, null, + null, 
"'info'", null, null, null, null, null, null, null, null, null, + null, null, null, null, "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS" + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", + "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", + "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", + "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", + "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", + "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", + "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", + "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", 
"SHOW_MULTILINE_COMMENT", + "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", + "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", + "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", + "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -231,9 +226,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(124); + setState(132); query(0); - setState(125); + setState(133); match(EOF); } } @@ -255,7 +250,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -329,11 +324,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(128); + setState(136); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(135); + setState(143); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -344,16 +339,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(130); + setState(138); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(131); + setState(139); match(PIPE); - setState(132); + setState(140); processingCommand(); } - } + } } - setState(137); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -411,43 +406,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(144); + setState(152); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(138); + setState(146); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(139); + setState(147); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(140); + setState(148); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(141); + setState(149); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(142); + setState(150); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(143); + setState(151); metricsCommand(); } break; @@ -508,6 +503,9 @@ public InlinestatsCommandContext inlinestatsCommand() { public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); + } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -532,111 +530,120 @@ public final ProcessingCommandContext processingCommand() throws 
RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(162); + setState(172); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(154); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(155); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(156); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(157); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(158); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(151); + setState(159); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(152); + setState(160); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(153); + setState(161); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(154); + setState(162); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(155); + setState(163); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(156); + setState(164); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(157); + setState(165); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(158); + setState(166); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(159); + setState(167); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(160); + setState(168); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(161); + setState(169); lookupCommand(); } break; + case 15: + enterOuterAlt(_localctx, 15); + { + setState(170); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(171); + joinCommand(); + } + break; } } catch (RecognitionException re) { @@ -682,9 +689,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(164); + setState(174); match(WHERE); - setState(165); + setState(175); booleanExpression(0); } } @@ -706,7 +713,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -900,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(197); + setState(207); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -909,9 +916,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(178); match(NOT); - setState(169); + setState(179); booleanExpression(8); } break; @@ -920,7 +927,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(170); + setState(180); valueExpression(); } break; @@ -929,7 +936,7 @@ private 
BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(171); + setState(181); regexBooleanExpression(); } break; @@ -938,41 +945,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(172); + setState(182); valueExpression(); - setState(174); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(173); + setState(183); match(NOT); } } - setState(176); + setState(186); match(IN); - setState(177); + setState(187); match(LP); - setState(178); + setState(188); valueExpression(); - setState(183); + setState(193); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(179); + setState(189); match(COMMA); - setState(180); + setState(190); valueExpression(); } } - setState(185); + setState(195); _errHandler.sync(this); _la = _input.LA(1); } - setState(186); + setState(196); match(RP); } break; @@ -981,21 +988,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(188); + setState(198); valueExpression(); - setState(189); + setState(199); match(IS); - setState(191); + setState(201); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(190); + setState(200); match(NOT); } } - setState(193); + setState(203); match(NULL); } break; @@ -1004,15 +1011,15 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(195); + setState(205); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(196); + setState(206); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(207); + setState(217); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1020,7 +1027,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(205); + setState(215); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1028,11 +1035,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(199); + setState(209); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(200); + setState(210); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(201); + setState(211); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1041,18 +1048,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(202); + setState(212); if (!(precpred(_ctx, 4))) throw new 
FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(203); + setState(213); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(204); + setState(214); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } - } + } } - setState(209); + setState(219); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1107,48 +1114,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(224); + setState(234); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(210); + setState(220); valueExpression(); - setState(212); + setState(222); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(211); + setState(221); match(NOT); } } - setState(214); + setState(224); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(215); + setState(225); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(217); + setState(227); valueExpression(); - setState(219); + setState(229); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(218); + setState(228); match(NOT); } } - setState(221); + setState(231); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(222); + setState(232); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1202,11 +1209,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(226); + setState(236); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(227); + setState(237); match(COLON); - setState(228); + setState(238); ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } @@ -1228,7 +1235,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } public void copyFrom(ValueExpressionContext ctx) { @@ -1290,14 +1297,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(235); + setState(245); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(230); + setState(240); operatorExpression(0); } break; @@ -1305,11 +1312,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(231); + setState(241); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(232); + setState(242); comparisonOperator(); - setState(233); + setState(243); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1333,7 +1340,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void 
copyFrom(OperatorExpressionContext ctx) { @@ -1434,7 +1441,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(241); + setState(251); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1443,7 +1450,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(238); + setState(248); primaryExpression(0); } break; @@ -1452,7 +1459,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(239); + setState(249); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1463,13 +1470,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(240); + setState(250); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(251); + setState(261); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1477,7 +1484,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(249); + setState(259); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1485,12 +1492,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(243); + setState(253); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(244); + setState(254); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { + if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1498,7 +1505,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(245); + setState(255); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1507,9 +1514,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(246); + setState(256); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(247); + setState(257); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1520,14 +1527,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(248); + setState(258); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } 
break; } - } + } } - setState(253); + setState(263); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1551,7 +1558,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1685,7 +1692,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(262); + setState(272); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1694,7 +1701,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(255); + setState(265); constant(); } break; @@ -1703,7 +1710,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(256); + setState(266); qualifiedName(); } break; @@ -1712,7 +1719,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(257); + setState(267); functionExpression(); } break; @@ -1721,17 +1728,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(258); + setState(268); match(LP); - setState(259); + setState(269); booleanExpression(0); - setState(260); + setState(270); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(269); + setState(279); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1742,16 +1749,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(264); + setState(274); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(265); + setState(275); match(CAST_OP); - setState(266); + setState(276); dataType(); } - } + } } - setState(271); + setState(281); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1813,37 +1820,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(272); + setState(282); functionName(); - setState(273); - match(LP); setState(283); + match(LP); + setState(293); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(274); + setState(284); match(ASTERISK); } break; case 2: { { - setState(275); + setState(285); booleanExpression(0); - setState(280); + setState(290); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(276); + setState(286); match(COMMA); - setState(277); + setState(287); booleanExpression(0); } } - setState(282); + setState(292); _errHandler.sync(this); _la = _input.LA(1); } @@ -1851,7 +1858,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(285); + 
setState(295); match(RP); } } @@ -1897,7 +1904,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(287); + setState(297); identifierOrParameter(); } } @@ -1919,7 +1926,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1955,7 +1962,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(289); + setState(299); identifier(); } } @@ -2002,9 +2009,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(291); + setState(301); match(ROW); - setState(292); + setState(302); fields(); } } @@ -2058,23 +2065,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(294); + setState(304); field(); - setState(299); + setState(309); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(295); + setState(305); match(COMMA); - setState(296); + setState(306); field(); } - } + } } - setState(301); + setState(311); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2126,19 +2133,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(305); + setState(315); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(302); + setState(312); qualifiedName(); - setState(303); + setState(313); match(ASSIGN); } break; } - setState(307); + setState(317); booleanExpression(0); } } @@ -2196,34 +2203,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(309); + setState(319); match(FROM); - setState(310); + setState(320); indexPattern(); - setState(315); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(311); + setState(321); match(COMMA); - setState(312); + setState(322); indexPattern(); } - } + } } - setState(317); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(319); + setState(329); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(318); + setState(328); metadata(); } break; @@ -2276,19 +2283,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(334); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(321); + setState(331); clusterString(); - setState(322); + setState(332); match(COLON); } break; } - setState(326); + setState(336); indexString(); } } @@ -2332,7 +2339,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(338); match(UNQUOTED_SOURCE); } } @@ -2378,7 +2385,7 @@ public final 
IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(340); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2433,20 +2440,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(334); + setState(344); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(332); + setState(342); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(333); + setState(343); deprecated_metadata(); } break; @@ -2503,25 +2510,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(336); + setState(346); match(METADATA); - setState(337); + setState(347); match(UNQUOTED_SOURCE); - setState(342); + setState(352); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(338); + setState(348); match(COMMA); - setState(339); + setState(349); match(UNQUOTED_SOURCE); } - } + } } - setState(344); + setState(354); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2570,11 +2577,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(345); + setState(355); match(OPENING_BRACKET); - setState(346); + setState(356); metadataOption(); - setState(347); + setState(357); match(CLOSING_BRACKET); } } @@ -2638,46 +2645,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(349); + setState(359); match(DEV_METRICS); - setState(350); + setState(360); indexPattern(); - setState(355); + setState(365); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(351); + setState(361); match(COMMA); - setState(352); + setState(362); indexPattern(); } - } + } } - setState(357); + setState(367); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(359); + setState(369); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(358); + setState(368); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(363); + setState(373); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(361); + setState(371); match(BY); - setState(362); + setState(372); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2727,9 +2734,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(365); + setState(375); match(EVAL); - setState(366); + setState(376); fields(); } } @@ -2782,26 +2789,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(368); + setState(378); match(STATS); - setState(370); + setState(380); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(369); + setState(379); 
((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(374); + setState(384); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(372); + setState(382); match(BY); - setState(373); + setState(383); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2858,23 +2865,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(386); aggField(); - setState(381); + setState(391); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(377); + setState(387); match(COMMA); - setState(378); + setState(388); aggField(); } - } + } } - setState(383); + setState(393); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2926,16 +2933,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(384); + setState(394); field(); - setState(387); + setState(397); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(385); + setState(395); match(WHERE); - setState(386); + setState(396); booleanExpression(0); } break; @@ -2992,23 +2999,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(389); + setState(399); identifierOrParameter(); - setState(394); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(390); + setState(400); match(DOT); - setState(391); + setState(401); identifierOrParameter(); } - } + } } - setState(396); + setState(406); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3064,23 +3071,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(397); + setState(407); identifierPattern(); - setState(402); + setState(412); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(398); + setState(408); match(DOT); - setState(399); + setState(409); identifierPattern(); } - } + } } - setState(404); + setState(414); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3136,23 +3143,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(405); + setState(415); qualifiedNamePattern(); - setState(410); + setState(420); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(406); + setState(416); match(COMMA); - setState(407); + setState(417); qualifiedNamePattern(); } - } + } } - setState(412); + setState(422); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3200,7 +3207,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(413); + setState(423); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || 
_la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3253,22 +3260,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(418); + setState(428); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(415); + setState(425); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(416); + setState(426); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(417); + setState(427); parameter(); } break; @@ -3292,7 +3299,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + @SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3541,14 +3548,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(462); + setState(472); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(420); + setState(430); match(NULL); } break; @@ -3556,9 +3563,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(421); + setState(431); integerValue(); - setState(422); + setState(432); match(UNQUOTED_IDENTIFIER); } break; @@ -3566,7 +3573,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(424); + setState(434); decimalValue(); } break; @@ -3574,7 +3581,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(425); + setState(435); integerValue(); } break; @@ -3582,7 +3589,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(426); + setState(436); booleanValue(); } break; @@ -3590,7 +3597,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(427); + setState(437); parameter(); } break; @@ -3598,7 +3605,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(428); + setState(438); string(); } break; @@ -3606,27 +3613,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(429); + setState(439); match(OPENING_BRACKET); - setState(430); + setState(440); numericValue(); - setState(435); + setState(445); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(431); + setState(441); match(COMMA); - setState(432); + setState(442); numericValue(); } } - setState(437); + setState(447); _errHandler.sync(this); _la = _input.LA(1); } - setState(438); + setState(448); match(CLOSING_BRACKET); } break; @@ 
-3634,27 +3641,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(440); + setState(450); match(OPENING_BRACKET); - setState(441); + setState(451); booleanValue(); - setState(446); + setState(456); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(442); + setState(452); match(COMMA); - setState(443); + setState(453); booleanValue(); } } - setState(448); + setState(458); _errHandler.sync(this); _la = _input.LA(1); } - setState(449); + setState(459); match(CLOSING_BRACKET); } break; @@ -3662,27 +3669,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(451); + setState(461); match(OPENING_BRACKET); - setState(452); + setState(462); string(); - setState(457); + setState(467); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(453); + setState(463); match(COMMA); - setState(454); + setState(464); string(); } } - setState(459); + setState(469); _errHandler.sync(this); _la = _input.LA(1); } - setState(460); + setState(470); match(CLOSING_BRACKET); } break; @@ -3706,7 +3713,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3756,14 +3763,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(466); + setState(476); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(464); + setState(474); match(PARAM); } break; @@ -3771,7 +3778,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(465); + setState(475); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3822,22 +3829,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(471); + setState(481); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(468); + setState(478); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(469); + setState(479); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(470); + setState(480); parameter(); } break; @@ -3884,9 +3891,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(483); match(LIMIT); - setState(474); + setState(484); match(INTEGER_LITERAL); } } @@ -3941,25 +3948,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(486); match(SORT); - setState(477); + setState(487); orderExpression(); - setState(482); + setState(492); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(478); + setState(488); match(COMMA); - setState(479); + setState(489); orderExpression(); } - } + } } - setState(484); + setState(494); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4015,14 +4022,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(495); booleanExpression(0); - setState(487); + setState(497); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(486); + setState(496); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4036,14 +4043,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(491); + setState(501); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(489); + setState(499); match(NULLS); - setState(490); + setState(500); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4102,9 +4109,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(493); + setState(503); match(KEEP); - setState(494); + setState(504); qualifiedNamePatterns(); } } @@ -4151,9 +4158,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(506); match(DROP); - setState(497); + setState(507); qualifiedNamePatterns(); } } @@ -4208,25 +4215,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(499); + setState(509); match(RENAME); - setState(500); + setState(510); renameClause(); - setState(505); + setState(515); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(501); + setState(511); match(COMMA); - setState(502); + setState(512); renameClause(); } - } + } } - setState(507); + setState(517); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -4280,11 +4287,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(518); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(509); + setState(519); match(AS); - setState(510); + setState(520); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4337,18 +4344,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(512); + setState(522); match(DISSECT); - setState(513); + setState(523); primaryExpression(0); - setState(514); + setState(524); string(); - setState(516); + setState(526); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(515); + setState(525); commandOptions(); } break; @@ -4401,11 +4408,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(528); match(GROK); - setState(519); + 
setState(529); primaryExpression(0); - setState(520); + setState(530); string(); } } @@ -4452,9 +4459,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(532); match(MV_EXPAND); - setState(523); + setState(533); qualifiedName(); } } @@ -4508,23 +4515,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(525); + setState(535); commandOption(); - setState(530); + setState(540); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(536); match(COMMA); - setState(527); + setState(537); commandOption(); } - } + } } - setState(532); + setState(542); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4576,11 +4583,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(543); identifier(); - setState(534); + setState(544); match(ASSIGN); - setState(535); + setState(545); constant(); } } @@ -4626,7 +4633,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(537); + setState(547); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4681,20 +4688,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(541); + setState(551); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(539); + setState(549); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(540); + setState(550); integerValue(); } break; @@ -4743,12 +4750,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(554); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(543); + setState(553); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4761,7 +4768,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(546); + setState(556); match(DECIMAL_LITERAL); } } @@ -4808,12 +4815,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(549); + setState(559); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(548); + setState(558); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4826,7 +4833,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(551); + setState(561); match(INTEGER_LITERAL); } } @@ -4870,7 +4877,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(563); match(QUOTED_STRING); } } @@ -4920,9 +4927,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(555); + setState(565); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && 
((1L << _la) & 562949953421312000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4975,9 +4982,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(567); match(EXPLAIN); - setState(558); + setState(568); subqueryExpression(); } } @@ -5025,11 +5032,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(560); + setState(570); match(OPENING_BRACKET); - setState(561); + setState(571); query(0); - setState(562); + setState(572); match(CLOSING_BRACKET); } } @@ -5051,7 +5058,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -5086,9 +5093,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(564); + setState(574); match(SHOW); - setState(565); + setState(575); match(INFO); } } @@ -5151,46 +5158,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(567); + setState(577); match(ENRICH); - setState(568); + setState(578); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(571); + setState(581); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(569); + setState(579); match(ON); - setState(570); + setState(580); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(582); + setState(592); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(573); + setState(583); match(WITH); - setState(574); + setState(584); enrichWithClause(); - setState(579); + setState(589); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(575); + setState(585); match(COMMA); - setState(576); + setState(586); enrichWithClause(); } - } + } } - setState(581); + setState(591); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -5247,19 +5254,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(587); + setState(597); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(584); + setState(594); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(585); + setState(595); match(ASSIGN); } break; } - setState(589); + setState(599); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5312,13 +5319,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(591); + setState(601); match(DEV_LOOKUP); - setState(592); + setState(602); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(593); + setState(603); match(ON); - setState(594); + setState(604); ((LookupCommandContext)_localctx).matchFields = 
qualifiedNamePatterns();
       }
     }
@@ -5371,18 +5378,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx
     try {
       enterOuterAlt(_localctx, 1);
       {
-      setState(596);
+      setState(606);
       match(DEV_INLINESTATS);
-      setState(597);
+      setState(607);
       ((InlinestatsCommandContext)_localctx).stats = aggFields();
-      setState(600);
+      setState(610);
       _errHandler.sync(this);
       switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) {
       case 1:
         {
-        setState(598);
+        setState(608);
         match(BY);
-        setState(599);
+        setState(609);
         ((InlinestatsCommandContext)_localctx).grouping = fields();
         }
         break;
@@ -5400,6 +5407,270 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx
     return _localctx;
   }
 
+  @SuppressWarnings("CheckReturnValue")
+  public static class JoinCommandContext extends ParserRuleContext {
+    public Token type;
+    public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); }
+    public JoinTargetContext joinTarget() {
+      return getRuleContext(JoinTargetContext.class,0);
+    }
+    public JoinConditionContext joinCondition() {
+      return getRuleContext(JoinConditionContext.class,0);
+    }
+    public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); }
+    public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); }
+    public TerminalNode DEV_JOIN_RIGHT() { return getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); }
+    @SuppressWarnings("this-escape")
+    public JoinCommandContext(ParserRuleContext parent, int invokingState) {
+      super(parent, invokingState);
+    }
+    @Override public int getRuleIndex() { return RULE_joinCommand; }
+    @Override
+    public void enterRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCommand(this);
+    }
+    @Override
+    public void exitRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCommand(this);
+    }
+    @Override
+    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+      if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitJoinCommand(this);
+      else return visitor.visitChildren(this);
+    }
+  }
+
+  public final JoinCommandContext joinCommand() throws RecognitionException {
+    JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState());
+    enterRule(_localctx, 124, RULE_joinCommand);
+    int _la;
+    try {
+      enterOuterAlt(_localctx, 1);
+      {
+      setState(613);
+      _errHandler.sync(this);
+      _la = _input.LA(1);
+      if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) {
+        {
+        setState(612);
+        ((JoinCommandContext)_localctx).type = _input.LT(1);
+        _la = _input.LA(1);
+        if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) {
+          ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this);
+        }
+        else {
+          if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
+          _errHandler.reportMatch(this);
+          consume();
+        }
+        }
+      }
+
+      setState(615);
+      match(DEV_JOIN);
+      setState(616);
+      joinTarget();
+      setState(617);
+      joinCondition();
+      }
+    }
+    catch (RecognitionException re) {
+      _localctx.exception = re;
+      _errHandler.reportError(this, re);
+      _errHandler.recover(this, re);
+    }
+    finally {
+      exitRule();
+    }
+    return _localctx;
+  }
+
+  @SuppressWarnings("CheckReturnValue")
+  public static class JoinTargetContext extends ParserRuleContext {
+    public IdentifierContext index;
+    public IdentifierContext alias;
+    public List<IdentifierContext> identifier() {
+      return getRuleContexts(IdentifierContext.class);
+    }
+    public IdentifierContext identifier(int i) {
+      return getRuleContext(IdentifierContext.class,i);
+    }
+    public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); }
+    @SuppressWarnings("this-escape")
+    public JoinTargetContext(ParserRuleContext parent, int invokingState) {
+      super(parent, invokingState);
+    }
+    @Override public int getRuleIndex() { return RULE_joinTarget; }
+    @Override
+    public void enterRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinTarget(this);
+    }
+    @Override
+    public void exitRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinTarget(this);
+    }
+    @Override
+    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+      if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitJoinTarget(this);
+      else return visitor.visitChildren(this);
+    }
+  }
+
+  public final JoinTargetContext joinTarget() throws RecognitionException {
+    JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState());
+    enterRule(_localctx, 126, RULE_joinTarget);
+    int _la;
+    try {
+      enterOuterAlt(_localctx, 1);
+      {
+      setState(619);
+      ((JoinTargetContext)_localctx).index = identifier();
+      setState(622);
+      _errHandler.sync(this);
+      _la = _input.LA(1);
+      if (_la==AS) {
+        {
+        setState(620);
+        match(AS);
+        setState(621);
+        ((JoinTargetContext)_localctx).alias = identifier();
+        }
+      }
+
+      }
+    }
+    catch (RecognitionException re) {
+      _localctx.exception = re;
+      _errHandler.reportError(this, re);
+      _errHandler.recover(this, re);
+    }
+    finally {
+      exitRule();
+    }
+    return _localctx;
+  }
+
+  @SuppressWarnings("CheckReturnValue")
+  public static class JoinConditionContext extends ParserRuleContext {
+    public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); }
+    public List<JoinPredicateContext> joinPredicate() {
+      return getRuleContexts(JoinPredicateContext.class);
+    }
+    public JoinPredicateContext joinPredicate(int i) {
+      return getRuleContext(JoinPredicateContext.class,i);
+    }
+    public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); }
+    public TerminalNode COMMA(int i) {
+      return getToken(EsqlBaseParser.COMMA, i);
+    }
+    @SuppressWarnings("this-escape")
+    public JoinConditionContext(ParserRuleContext parent, int invokingState) {
+      super(parent, invokingState);
+    }
+    @Override public int getRuleIndex() { return RULE_joinCondition; }
+    @Override
+    public void enterRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCondition(this);
+    }
+    @Override
+    public void exitRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCondition(this);
+    }
+    @Override
+    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+      if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitJoinCondition(this);
+      else return visitor.visitChildren(this);
+    }
+  }
+
+  public final JoinConditionContext joinCondition() throws RecognitionException {
+    JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState());
+    enterRule(_localctx, 128, RULE_joinCondition);
+    try {
+      int _alt;
+      enterOuterAlt(_localctx, 1);
+      {
+      setState(624);
+      match(ON);
+      setState(625);
+      joinPredicate();
+      setState(630);
+      _errHandler.sync(this);
+      _alt = getInterpreter().adaptivePredict(_input,60,_ctx);
+      while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
+        if ( _alt==1 ) {
+          {
+          {
+          setState(626);
+          match(COMMA);
+          setState(627);
+          joinPredicate();
+          }
+          }
+        }
+        setState(632);
+        _errHandler.sync(this);
+        _alt = getInterpreter().adaptivePredict(_input,60,_ctx);
+      }
+      }
+    }
+    catch (RecognitionException re) {
+      _localctx.exception = re;
+      _errHandler.reportError(this, re);
+      _errHandler.recover(this, re);
+    }
+    finally {
+      exitRule();
+    }
+    return _localctx;
+  }
+
+  @SuppressWarnings("CheckReturnValue")
+  public static class JoinPredicateContext extends ParserRuleContext {
+    public ValueExpressionContext valueExpression() {
+      return getRuleContext(ValueExpressionContext.class,0);
+    }
+    @SuppressWarnings("this-escape")
+    public JoinPredicateContext(ParserRuleContext parent, int invokingState) {
+      super(parent, invokingState);
+    }
+    @Override public int getRuleIndex() { return RULE_joinPredicate; }
+    @Override
+    public void enterRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinPredicate(this);
+    }
+    @Override
+    public void exitRule(ParseTreeListener listener) {
+      if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinPredicate(this);
+    }
+    @Override
+    public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+      if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitJoinPredicate(this);
+      else return visitor.visitChildren(this);
+    }
+  }
+
+  public final JoinPredicateContext joinPredicate() throws RecognitionException {
+    JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState());
+    enterRule(_localctx, 130, RULE_joinPredicate);
+    try {
+      enterOuterAlt(_localctx, 1);
+      {
+      setState(633);
+      valueExpression();
+      }
+    }
+    catch (RecognitionException re) {
+      _localctx.exception = re;
+      _errHandler.reportError(this, re);
+      _errHandler.recover(this, re);
+    }
+    finally {
+      exitRule();
+    }
+    return _localctx;
+  }
+
   public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
     switch (ruleIndex) {
     case 1:
@@ -5441,433 +5712,454 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in
       return this.isDevVersion();
     case 3:
       return this.isDevVersion();
+    case 4:
+      return this.isDevVersion();
     }
     return true;
   }
   private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) {
     switch (predIndex) {
-    case 4:
-      return this.isDevVersion();
     case 5:
-      return precpred(_ctx, 5);
+      return this.isDevVersion();
     case 6:
+      return precpred(_ctx, 5);
+    case 7:
       return precpred(_ctx, 4);
     }
     return true;
   }
   private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, int predIndex) {
     switch (predIndex) {
-    case 7:
-      return precpred(_ctx, 2);
     case 8:
+      return precpred(_ctx, 2);
+    case 9:
       return precpred(_ctx, 1);
     }
     return true;
   }
   private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) {
     switch (predIndex) {
-    case 9:
+    case 10:
       return precpred(_ctx, 1);
     }
     return true;
   }
   private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) {
     switch (predIndex) {
-    case 10:
+    case 11:
       return this.isDevVersion();
     }
     return true;
   }
   private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) {
     switch (predIndex) {
-    case 11:
+    case 12:
       return this.isDevVersion();
     }
     return true;
   }
 
   public static final String _serializedATN =
-    "\u0004\u0001w\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+
"\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ - "\u0086\b\u0001\n\u0001\f\u0001\u0089\t\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0091\b\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a3\b\u0003\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00b6"+ - "\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00c0\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ce\b\u0005"+ - "\n\u0005\f\u0005\u00d1\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00d5"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00dc\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ - "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0003\b\u00ec\b\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0003\t\u00f2\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00fa\b\t\n\t\f\t\u00fd\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0003\n\u0107\b\n\u0001\n\u0001\n\u0001\n\u0005"+ - "\n\u010c\b\n\n\n\f\n\u010f\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0117\b\u000b\n\u000b\f\u000b"+ - "\u011a\t\u000b\u0003\u000b\u011c\b\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0005\u000f\u012a\b\u000f\n\u000f\f\u000f\u012d"+ - "\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132\b\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0005\u0011\u013a\b\u0011\n\u0011\f\u0011\u013d\t\u0011\u0001\u0011\u0003"+ - 
"\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0145"+ - "\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u014f\b\u0015\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0155\b\u0016\n\u0016\f\u0016"+ - "\u0158\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0162\b\u0018\n\u0018"+ - "\f\u0018\u0165\t\u0018\u0001\u0018\u0003\u0018\u0168\b\u0018\u0001\u0018"+ - "\u0001\u0018\u0003\u0018\u016c\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0003\u001a\u0173\b\u001a\u0001\u001a\u0001\u001a"+ - "\u0003\u001a\u0177\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ - "\u017c\b\u001b\n\u001b\f\u001b\u017f\t\u001b\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0003\u001c\u0184\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005"+ - "\u001d\u0189\b\u001d\n\u001d\f\u001d\u018c\t\u001d\u0001\u001e\u0001\u001e"+ - "\u0001\u001e\u0005\u001e\u0191\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0199\b\u001f\n\u001f\f\u001f"+ - "\u019c\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01a3\b!\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b2\b\"\n\"\f\"\u01b5\t\"\u0001\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bd\b\"\n\"\f\"\u01c0"+ - "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c8\b\""+ - "\n\"\f\"\u01cb\t\"\u0001\"\u0001\"\u0003\"\u01cf\b\"\u0001#\u0001#\u0003"+ - "#\u01d3\b#\u0001$\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001%\u0001"+ - "&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001\'"+ - "\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ - "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ - "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ - ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - "/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ - "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ - "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ - ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ - "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ - ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ - "\u0000;<\u0001\u0000=?\u0002\u0000\u001a\u001aLL\u0001\u0000CD\u0002\u0000"+ - "\u001f\u001f##\u0002\u0000&&))\u0002\u0000%%33\u0002\u0000446:\u0274\u0000"+ - "|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090"+ - "\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001"+ - "\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000"+ - "\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000"+ - "\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000"+ - "\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u011f\u0001\u0000"+ - 
"\u0000\u0000\u001a\u0121\u0001\u0000\u0000\u0000\u001c\u0123\u0001\u0000"+ - "\u0000\u0000\u001e\u0126\u0001\u0000\u0000\u0000 \u0131\u0001\u0000\u0000"+ - "\u0000\"\u0135\u0001\u0000\u0000\u0000$\u0144\u0001\u0000\u0000\u0000"+ - "&\u0148\u0001\u0000\u0000\u0000(\u014a\u0001\u0000\u0000\u0000*\u014e"+ - "\u0001\u0000\u0000\u0000,\u0150\u0001\u0000\u0000\u0000.\u0159\u0001\u0000"+ - "\u0000\u00000\u015d\u0001\u0000\u0000\u00002\u016d\u0001\u0000\u0000\u0000"+ - "4\u0170\u0001\u0000\u0000\u00006\u0178\u0001\u0000\u0000\u00008\u0180"+ - "\u0001\u0000\u0000\u0000:\u0185\u0001\u0000\u0000\u0000<\u018d\u0001\u0000"+ - "\u0000\u0000>\u0195\u0001\u0000\u0000\u0000@\u019d\u0001\u0000\u0000\u0000"+ - "B\u01a2\u0001\u0000\u0000\u0000D\u01ce\u0001\u0000\u0000\u0000F\u01d2"+ - "\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001\u0000"+ - "\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000\u0000"+ - "P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T\u01f3"+ - "\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001\u0000"+ - "\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000\u0000"+ - "\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000b"+ - "\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220\u0001"+ - "\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000\u0000"+ - "\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000p"+ - "\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237\u0001"+ - "\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000\u0000"+ - "\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005"+ - "\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001"+ - "\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001"+ - "\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0019"+ - "\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000"+ - "\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000"+ - "\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000"+ - "\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000"+ - "\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d"+ - "\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u0003"+ - "0\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000"+ - "\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000"+ - "\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000"+ - "\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004"+ - "\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3"+ - "\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000"+ - "\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003"+ - "Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f"+ - "\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003"+ - "\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000"+ - "\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000"+ - "\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000"+ - "\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000"+ - "\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000"+ - 
"\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000"+ - "\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000"+ - "\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000"+ - "\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6"+ - "\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8"+ - "\u00a9\u0005,\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003"+ - "\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010"+ - "\b\u0000\u00ad\u00af\u0005,\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000"+ - "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000"+ - "\u0000\u00b0\u00b1\u0005\'\u0000\u0000\u00b1\u00b2\u0005+\u0000\u0000"+ - "\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005\"\u0000\u0000\u00b4"+ - "\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9"+ - "\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7"+ - "\u0001\u0000\u0000\u0000\u00ba\u00bb\u00052\u0000\u0000\u00bb\u00c6\u0001"+ - "\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005("+ - "\u0000\u0000\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000"+ - "\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000"+ - "\u0000\u00c1\u00c2\u0005-\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000"+ - "\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000"+ - "\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000"+ - "\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000"+ - "\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8"+ - "\u00c9\u0005\u001e\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb"+ - "\n\u0004\u0000\u0000\u00cb\u00cc\u0005/\u0000\u0000\u00cc\u00ce\u0003"+ - "\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000"+ - "\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010"+ - "\b\u0000\u00d3\u00d5\u0005,\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000"+ - "\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0005*\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8"+ - "\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc"+ - "\u0005,\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001"+ - "\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u0005"+ - "1\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000"+ - "\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000"+ - "\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003:\u001d\u0000"+ - "\u00e3\u00e4\u0005\u0018\u0000\u0000\u00e4\u00e5\u0003D\"\u0000\u00e5"+ - "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ - "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ - "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ - "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ - "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ - 
"\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ - "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ - "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ - "\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ - "\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ - "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ - "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ - "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005+\u0000\u0000\u0103"+ - "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00052\u0000\u0000\u0105\u0107"+ - "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ - "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ - "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ - "\n\u0001\u0000\u0000\u0109\u010a\u0005!\u0000\u0000\u010a\u010c\u0003"+ - "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ - "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ - "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ - "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005+\u0000"+ - "\u0000\u0112\u011c\u0005=\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ - "\u0114\u0115\u0005\"\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ - "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ - "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ - "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ - "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ - "\u011e\u00052\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0120"+ - "\u0003H$\u0000\u0120\u0019\u0001\u0000\u0000\u0000\u0121\u0122\u0003@"+ - " \u0000\u0122\u001b\u0001\u0000\u0000\u0000\u0123\u0124\u0005\f\u0000"+ - "\u0000\u0124\u0125\u0003\u001e\u000f\u0000\u0125\u001d\u0001\u0000\u0000"+ - "\u0000\u0126\u012b\u0003 \u0010\u0000\u0127\u0128\u0005\"\u0000\u0000"+ - "\u0128\u012a\u0003 \u0010\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u012a"+ - "\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012b"+ - "\u012c\u0001\u0000\u0000\u0000\u012c\u001f\u0001\u0000\u0000\u0000\u012d"+ - "\u012b\u0001\u0000\u0000\u0000\u012e\u012f\u0003:\u001d\u0000\u012f\u0130"+ - "\u0005 \u0000\u0000\u0130\u0132\u0001\u0000\u0000\u0000\u0131\u012e\u0001"+ - "\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132\u0133\u0001"+ - "\u0000\u0000\u0000\u0133\u0134\u0003\n\u0005\u0000\u0134!\u0001\u0000"+ - "\u0000\u0000\u0135\u0136\u0005\u0006\u0000\u0000\u0136\u013b\u0003$\u0012"+ - "\u0000\u0137\u0138\u0005\"\u0000\u0000\u0138\u013a\u0003$\u0012\u0000"+ - "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ - "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ - "\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u0140\u0003*\u0015\u0000\u013f\u013e\u0001\u0000\u0000\u0000\u013f"+ - 
"\u0140\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141\u0142"+ - "\u0003&\u0013\u0000\u0142\u0143\u0005\u0018\u0000\u0000\u0143\u0145\u0001"+ - "\u0000\u0000\u0000\u0144\u0141\u0001\u0000\u0000\u0000\u0144\u0145\u0001"+ - "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0147\u0003"+ - "(\u0014\u0000\u0147%\u0001\u0000\u0000\u0000\u0148\u0149\u0005L\u0000"+ - "\u0000\u0149\'\u0001\u0000\u0000\u0000\u014a\u014b\u0007\u0002\u0000\u0000"+ - "\u014b)\u0001\u0000\u0000\u0000\u014c\u014f\u0003,\u0016\u0000\u014d\u014f"+ - "\u0003.\u0017\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014d\u0001"+ - "\u0000\u0000\u0000\u014f+\u0001\u0000\u0000\u0000\u0150\u0151\u0005K\u0000"+ - "\u0000\u0151\u0156\u0005L\u0000\u0000\u0152\u0153\u0005\"\u0000\u0000"+ - "\u0153\u0155\u0005L\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ - "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ - "\u0157\u0001\u0000\u0000\u0000\u0157-\u0001\u0000\u0000\u0000\u0158\u0156"+ - "\u0001\u0000\u0000\u0000\u0159\u015a\u0005A\u0000\u0000\u015a\u015b\u0003"+ - ",\u0016\u0000\u015b\u015c\u0005B\u0000\u0000\u015c/\u0001\u0000\u0000"+ - "\u0000\u015d\u015e\u0005\u0013\u0000\u0000\u015e\u0163\u0003$\u0012\u0000"+ - "\u015f\u0160\u0005\"\u0000\u0000\u0160\u0162\u0003$\u0012\u0000\u0161"+ - "\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163"+ - "\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164"+ - "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166"+ - "\u0168\u00036\u001b\u0000\u0167\u0166\u0001\u0000\u0000\u0000\u0167\u0168"+ - "\u0001\u0000\u0000\u0000\u0168\u016b\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0005\u001d\u0000\u0000\u016a\u016c\u0003\u001e\u000f\u0000\u016b\u0169"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000\u016c1\u0001"+ - "\u0000\u0000\u0000\u016d\u016e\u0005\u0004\u0000\u0000\u016e\u016f\u0003"+ - "\u001e\u000f\u0000\u016f3\u0001\u0000\u0000\u0000\u0170\u0172\u0005\u000f"+ - "\u0000\u0000\u0171\u0173\u00036\u001b\u0000\u0172\u0171\u0001\u0000\u0000"+ - "\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176\u0001\u0000\u0000"+ - "\u0000\u0174\u0175\u0005\u001d\u0000\u0000\u0175\u0177\u0003\u001e\u000f"+ - "\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000\u0000"+ - "\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u017d\u00038\u001c\u0000\u0179"+ - "\u017a\u0005\"\u0000\u0000\u017a\u017c\u00038\u001c\u0000\u017b\u0179"+ - "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017b"+ - "\u0001\u0000\u0000\u0000\u017d\u017e\u0001\u0000\u0000\u0000\u017e7\u0001"+ - "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180\u0183\u0003"+ - " \u0010\u0000\u0181\u0182\u0005\u0010\u0000\u0000\u0182\u0184\u0003\n"+ - "\u0005\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ - "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u018a\u0003H$\u0000"+ - "\u0186\u0187\u0005$\u0000\u0000\u0187\u0189\u0003H$\u0000\u0188\u0186"+ - "\u0001\u0000\u0000\u0000\u0189\u018c\u0001\u0000\u0000\u0000\u018a\u0188"+ - "\u0001\u0000\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001"+ - "\u0000\u0000\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018d\u0192\u0003"+ - "B!\u0000\u018e\u018f\u0005$\u0000\u0000\u018f\u0191\u0003B!\u0000\u0190"+ - "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ - "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ - 
"=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u019a"+ - "\u0003<\u001e\u0000\u0196\u0197\u0005\"\u0000\u0000\u0197\u0199\u0003"+ - "<\u001e\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000"+ - "\u0000\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000"+ - "\u0000\u0000\u019b?\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ - "\u0000\u019d\u019e\u0007\u0003\u0000\u0000\u019eA\u0001\u0000\u0000\u0000"+ - "\u019f\u01a3\u0005P\u0000\u0000\u01a0\u01a1\u0004!\n\u0000\u01a1\u01a3"+ - "\u0003F#\u0000\u01a2\u019f\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a3C\u0001\u0000\u0000\u0000\u01a4\u01cf\u0005-\u0000\u0000"+ - "\u01a5\u01a6\u0003h4\u0000\u01a6\u01a7\u0005C\u0000\u0000\u01a7\u01cf"+ - "\u0001\u0000\u0000\u0000\u01a8\u01cf\u0003f3\u0000\u01a9\u01cf\u0003h"+ - "4\u0000\u01aa\u01cf\u0003b1\u0000\u01ab\u01cf\u0003F#\u0000\u01ac\u01cf"+ - "\u0003j5\u0000\u01ad\u01ae\u0005A\u0000\u0000\u01ae\u01b3\u0003d2\u0000"+ - "\u01af\u01b0\u0005\"\u0000\u0000\u01b0\u01b2\u0003d2\u0000\u01b1\u01af"+ - "\u0001\u0000\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1"+ - "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4\u01b6"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7"+ - "\u0005B\u0000\u0000\u01b7\u01cf\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005"+ - "A\u0000\u0000\u01b9\u01be\u0003b1\u0000\u01ba\u01bb\u0005\"\u0000\u0000"+ - "\u01bb\u01bd\u0003b1\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0"+ - "\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf"+ - "\u0001\u0000\u0000\u0000\u01bf\u01c1\u0001\u0000\u0000\u0000\u01c0\u01be"+ - "\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005B\u0000\u0000\u01c2\u01cf\u0001"+ - "\u0000\u0000\u0000\u01c3\u01c4\u0005A\u0000\u0000\u01c4\u01c9\u0003j5"+ - "\u0000\u01c5\u01c6\u0005\"\u0000\u0000\u01c6\u01c8\u0003j5\u0000\u01c7"+ - "\u01c5\u0001\u0000\u0000\u0000\u01c8\u01cb\u0001\u0000\u0000\u0000\u01c9"+ - "\u01c7\u0001\u0000\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca"+ - "\u01cc\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0005B\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01a4"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a5\u0001\u0000\u0000\u0000\u01ce\u01a8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a9\u0001\u0000\u0000\u0000\u01ce\u01aa"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ab\u0001\u0000\u0000\u0000\u01ce\u01ac"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ad\u0001\u0000\u0000\u0000\u01ce\u01b8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01c3\u0001\u0000\u0000\u0000\u01cfE\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d3\u00050\u0000\u0000\u01d1\u01d3\u0005@\u0000"+ - "\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000"+ - "\u0000\u01d3G\u0001\u0000\u0000\u0000\u01d4\u01d8\u0003@ \u0000\u01d5"+ - "\u01d6\u0004$\u000b\u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d4\u0001"+ - "\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000"+ - "\u0000\u0000\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001b\u0000"+ - "\u0000\u01dbK\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000"+ - "\u01dd\u01e2\u0003N\'\u0000\u01de\u01df\u0005\"\u0000\u0000\u01df\u01e1"+ - "\u0003N\'\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001"+ - "\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001"+ - "\u0000\u0000\u0000\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000"+ - 
"\u0000\u0000\u01e5\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000"+ - "\u0000\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000"+ - "\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005.\u0000\u0000"+ - "\u01ea\u01ec\u0007\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0005\b\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001"+ - "\u0000\u0000\u0000\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003"+ - ">\u001f\u0000\u01f2S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000"+ - "\u0000\u01f4\u01f9\u0003V+\u0000\u01f5\u01f6\u0005\"\u0000\u0000\u01f6"+ - "\u01f8\u0003V+\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001"+ - "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001"+ - "\u0000\u0000\u0000\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000"+ - "\u01fe\u01ff\u0003<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201"+ - "\u0005\u0001\u0000\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003"+ - "j5\u0000\u0203\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0001\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206"+ - "\u0207\u0005\u0007\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209"+ - "\u0003j5\u0000\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000"+ - "\u0000\u020b\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d"+ - "\u0212\u0003`0\u0000\u020e\u020f\u0005\"\u0000\u0000\u020f\u0211\u0003"+ - "`0\u0000\u0210\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000"+ - "\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000"+ - "\u0000\u0213_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+ - "\u0215\u0216\u0003@ \u0000\u0216\u0217\u0005 \u0000\u0000\u0217\u0218"+ - "\u0003D\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006"+ - "\u0000\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000"+ - "\u021c\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c"+ - "\u0001\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007"+ - "\u0000\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001"+ - "\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005"+ - "\u001c\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000"+ - "\u0000\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001b"+ - "\u0000\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a\u0000"+ - "\u0000\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000"+ - "\u022cm\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e"+ - "\u022f\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005"+ - "A\u0000\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000"+ - "\u0000\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000"+ - "\u0235\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238"+ - "\u0005\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005"+ - "X\u0000\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000"+ - "\u0000\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f"+ - 
"\u0240\u0005\"\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001"+ - "\u0000\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ - "\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001"+ - "\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001"+ - "\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000"+ - "\u0000\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005 \u0000\u0000"+ - "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ - "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ - "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ - "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ - "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ - "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ - "\u0256\u0257\u0005\u001d\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ - "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ - "\u0259{\u0001\u0000\u0000\u0000:\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ - "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ - "\u011b\u012b\u0131\u013b\u013f\u0144\u014e\u0156\u0163\u0167\u016b\u0172"+ - "\u0176\u017d\u0183\u018a\u0192\u019a\u01a2\u01b3\u01be\u01c9\u01ce\u01d2"+ - "\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b\u0243"+ - "\u0246\u024b\u0258"; + "\u0004\u0001\u0080\u027c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ + "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ + "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + 
"\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00d0\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005"+ + "\u0005\u00d8\b\u0005\n\u0005\f\u0005\u00db\t\u0005\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00df\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0003\u0006\u00e6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00eb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f6\b\b\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0003\t\u00fc\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0005\t\u0104\b\t\n\t\f\t\u0107\t\t\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0111\b\n\u0001\n\u0001"+ + "\n\u0001\n\u0005\n\u0116\b\n\n\n\f\n\u0119\t\n\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0121\b\u000b"+ + "\n\u000b\f\u000b\u0124\t\u000b\u0003\u000b\u0126\b\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0134\b\u000f\n\u000f"+ + "\f\u000f\u0137\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010"+ + "\u013c\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0005\u0011\u0144\b\u0011\n\u0011\f\u0011\u0147\t\u0011\u0001"+ + "\u0011\u0003\u0011\u014a\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ + "\u0012\u014f\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0159\b\u0015\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015f\b\u0016\n"+ + "\u0016\f\u0016\u0162\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016c"+ + "\b\u0018\n\u0018\f\u0018\u016f\t\u0018\u0001\u0018\u0003\u0018\u0172\b"+ + "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0176\b\u0018\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017d\b\u001a\u0001"+ + "\u001a\u0001\u001a\u0003\u001a\u0181\b\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0005\u001b\u0186\b\u001b\n\u001b\f\u001b\u0189\t\u001b\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0003\u001c\u018e\b\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0005\u001d\u0193\b\u001d\n\u001d\f\u001d\u0196\t\u001d\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u019b\b\u001e\n\u001e\f\u001e"+ + "\u019e\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a3\b"+ + "\u001f\n\u001f\f\u001f\u01a6\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ + "!\u0003!\u01ad\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bc\b\"\n"+ + "\"\f\"\u01bf\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01c7\b\"\n\"\f\"\u01ca\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0005\"\u01d2\b\"\n\"\f\"\u01d5\t\"\u0001\"\u0001\"\u0003\"\u01d9"+ + "\b\"\u0001#\u0001#\u0003#\u01dd\b#\u0001$\u0001$\u0001$\u0003$\u01e2\b"+ + "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01eb\b&\n&"+ + "\f&\u01ee\t&\u0001\'\u0001\'\u0003\'\u01f2\b\'\u0001\'\u0001\'\u0003\'"+ + 
"\u01f6\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001*\u0005*\u0202\b*\n*\f*\u0205\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0003,\u020f\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u021b\b/\n/\f/\u021e\t/\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0228\b2\u0001"+ + "3\u00033\u022b\b3\u00013\u00013\u00014\u00034\u0230\b4\u00014\u00014\u0001"+ + "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ + "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0246\b:\u0001"+ + ":\u0001:\u0001:\u0001:\u0005:\u024c\b:\n:\f:\u024f\t:\u0003:\u0251\b:"+ + "\u0001;\u0001;\u0001;\u0003;\u0256\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0263\b=\u0001>\u0003"+ + ">\u0266\b>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026f"+ + "\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0275\b@\n@\f@\u0278\t@\u0001A"+ + "\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006"+ + "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ + "02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000"+ + "@A\u0001\u0000BD\u0002\u0000\u001f\u001fQQ\u0001\u0000HI\u0002\u0000$"+ + "$((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016"+ + "\u0018\u0295\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ + "\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000"+ + "\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00cf\u0001\u0000\u0000"+ + "\u0000\f\u00ea\u0001\u0000\u0000\u0000\u000e\u00ec\u0001\u0000\u0000\u0000"+ + "\u0010\u00f5\u0001\u0000\u0000\u0000\u0012\u00fb\u0001\u0000\u0000\u0000"+ + "\u0014\u0110\u0001\u0000\u0000\u0000\u0016\u011a\u0001\u0000\u0000\u0000"+ + "\u0018\u0129\u0001\u0000\u0000\u0000\u001a\u012b\u0001\u0000\u0000\u0000"+ + "\u001c\u012d\u0001\u0000\u0000\u0000\u001e\u0130\u0001\u0000\u0000\u0000"+ + " \u013b\u0001\u0000\u0000\u0000\"\u013f\u0001\u0000\u0000\u0000$\u014e"+ + "\u0001\u0000\u0000\u0000&\u0152\u0001\u0000\u0000\u0000(\u0154\u0001\u0000"+ + "\u0000\u0000*\u0158\u0001\u0000\u0000\u0000,\u015a\u0001\u0000\u0000\u0000"+ + ".\u0163\u0001\u0000\u0000\u00000\u0167\u0001\u0000\u0000\u00002\u0177"+ + "\u0001\u0000\u0000\u00004\u017a\u0001\u0000\u0000\u00006\u0182\u0001\u0000"+ + "\u0000\u00008\u018a\u0001\u0000\u0000\u0000:\u018f\u0001\u0000\u0000\u0000"+ + "<\u0197\u0001\u0000\u0000\u0000>\u019f\u0001\u0000\u0000\u0000@\u01a7"+ + "\u0001\u0000\u0000\u0000B\u01ac\u0001\u0000\u0000\u0000D\u01d8\u0001\u0000"+ + "\u0000\u0000F\u01dc\u0001\u0000\u0000\u0000H\u01e1\u0001\u0000\u0000\u0000"+ + "J\u01e3\u0001\u0000\u0000\u0000L\u01e6\u0001\u0000\u0000\u0000N\u01ef"+ + "\u0001\u0000\u0000\u0000P\u01f7\u0001\u0000\u0000\u0000R\u01fa\u0001\u0000"+ + "\u0000\u0000T\u01fd\u0001\u0000\u0000\u0000V\u0206\u0001\u0000\u0000\u0000"+ + "X\u020a\u0001\u0000\u0000\u0000Z\u0210\u0001\u0000\u0000\u0000\\\u0214"+ + "\u0001\u0000\u0000\u0000^\u0217\u0001\u0000\u0000\u0000`\u021f\u0001\u0000"+ + "\u0000\u0000b\u0223\u0001\u0000\u0000\u0000d\u0227\u0001\u0000\u0000\u0000"+ + "f\u022a\u0001\u0000\u0000\u0000h\u022f\u0001\u0000\u0000\u0000j\u0233"+ + "\u0001\u0000\u0000\u0000l\u0235\u0001\u0000\u0000\u0000n\u0237\u0001\u0000"+ + "\u0000\u0000p\u023a\u0001\u0000\u0000\u0000r\u023e\u0001\u0000\u0000\u0000"+ + "t\u0241\u0001\u0000\u0000\u0000v\u0255\u0001\u0000\u0000\u0000x\u0259"+ + 
"\u0001\u0000\u0000\u0000z\u025e\u0001\u0000\u0000\u0000|\u0265\u0001\u0000"+ + "\u0000\u0000~\u026b\u0001\u0000\u0000\u0000\u0080\u0270\u0001\u0000\u0000"+ + "\u0000\u0082\u0279\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001"+ + "\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000"+ + "\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004"+ + "\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000"+ + "\u0000\u008b\u008c\u0005\u001e\u0000\u0000\u008c\u008e\u0003\u0006\u0003"+ + "\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000"+ + "\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000"+ + "\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ + "\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094"+ + "\u0099\u0003\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004"+ + "\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000"+ + "\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000"+ + "\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000"+ + "\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019"+ + "\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d"+ + "\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003"+ + "L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad"+ + "\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5"+ + "\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad"+ + "\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x"+ + "<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000"+ + "\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000"+ + "\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000"+ + "\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000"+ + "\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000"+ + "\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0"+ + "\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2"+ + "\u00b3\u00051\u0000\u0000\u00b3\u00d0\u0003\n\u0005\b\u00b4\u00d0\u0003"+ + "\u0010\b\u0000\u00b5\u00d0\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010"+ + "\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000"+ + "\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ + "\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc"+ + "\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0"+ + "\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001"+ + "\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001"+ + "\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+ + "\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00d0\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000"+ + "\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000"+ + 
"\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000"+ + "\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00d0\u0001\u0000\u0000\u0000\u00cd"+ + "\u00ce\u0004\u0005\u0005\u0000\u00ce\u00d0\u0003\u000e\u0007\u0000\u00cf"+ + "\u00b1\u0001\u0000\u0000\u0000\u00cf\u00b4\u0001\u0000\u0000\u0000\u00cf"+ + "\u00b5\u0001\u0000\u0000\u0000\u00cf\u00b6\u0001\u0000\u0000\u0000\u00cf"+ + "\u00c6\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00d0"+ + "\u00d9\u0001\u0000\u0000\u0000\u00d1\u00d2\n\u0005\u0000\u0000\u00d2\u00d3"+ + "\u0005#\u0000\u0000\u00d3\u00d8\u0003\n\u0005\u0006\u00d4\u00d5\n\u0004"+ + "\u0000\u0000\u00d5\u00d6\u00054\u0000\u0000\u00d6\u00d8\u0003\n\u0005"+ + "\u0005\u00d7\u00d1\u0001\u0000\u0000\u0000\u00d7\u00d4\u0001\u0000\u0000"+ + "\u0000\u00d8\u00db\u0001\u0000\u0000\u0000\u00d9\u00d7\u0001\u0000\u0000"+ + "\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da\u000b\u0001\u0000\u0000"+ + "\u0000\u00db\u00d9\u0001\u0000\u0000\u0000\u00dc\u00de\u0003\u0010\b\u0000"+ + "\u00dd\u00df\u00051\u0000\u0000\u00de\u00dd\u0001\u0000\u0000\u0000\u00de"+ + "\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0001\u0000\u0000\u0000\u00e0"+ + "\u00e1\u0005/\u0000\u0000\u00e1\u00e2\u0003j5\u0000\u00e2\u00eb\u0001"+ + "\u0000\u0000\u0000\u00e3\u00e5\u0003\u0010\b\u0000\u00e4\u00e6\u00051"+ + "\u0000\u0000\u00e5\u00e4\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000"+ + "\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00e8\u00056\u0000"+ + "\u0000\u00e8\u00e9\u0003j5\u0000\u00e9\u00eb\u0001\u0000\u0000\u0000\u00ea"+ + "\u00dc\u0001\u0000\u0000\u0000\u00ea\u00e3\u0001\u0000\u0000\u0000\u00eb"+ + "\r\u0001\u0000\u0000\u0000\u00ec\u00ed\u0003:\u001d\u0000\u00ed\u00ee"+ + "\u0005\u001d\u0000\u0000\u00ee\u00ef\u0003D\"\u0000\u00ef\u000f\u0001"+ + "\u0000\u0000\u0000\u00f0\u00f6\u0003\u0012\t\u0000\u00f1\u00f2\u0003\u0012"+ + "\t\u0000\u00f2\u00f3\u0003l6\u0000\u00f3\u00f4\u0003\u0012\t\u0000\u00f4"+ + "\u00f6\u0001\u0000\u0000\u0000\u00f5\u00f0\u0001\u0000\u0000\u0000\u00f5"+ + "\u00f1\u0001\u0000\u0000\u0000\u00f6\u0011\u0001\u0000\u0000\u0000\u00f7"+ + "\u00f8\u0006\t\uffff\uffff\u0000\u00f8\u00fc\u0003\u0014\n\u0000\u00f9"+ + "\u00fa\u0007\u0000\u0000\u0000\u00fa\u00fc\u0003\u0012\t\u0003\u00fb\u00f7"+ + "\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0105"+ + "\u0001\u0000\u0000\u0000\u00fd\u00fe\n\u0002\u0000\u0000\u00fe\u00ff\u0007"+ + "\u0001\u0000\u0000\u00ff\u0104\u0003\u0012\t\u0003\u0100\u0101\n\u0001"+ + "\u0000\u0000\u0101\u0102\u0007\u0000\u0000\u0000\u0102\u0104\u0003\u0012"+ + "\t\u0002\u0103\u00fd\u0001\u0000\u0000\u0000\u0103\u0100\u0001\u0000\u0000"+ + "\u0000\u0104\u0107\u0001\u0000\u0000\u0000\u0105\u0103\u0001\u0000\u0000"+ + "\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106\u0013\u0001\u0000\u0000"+ + "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u0109\u0006\n\uffff\uffff"+ + "\u0000\u0109\u0111\u0003D\"\u0000\u010a\u0111\u0003:\u001d\u0000\u010b"+ + "\u0111\u0003\u0016\u000b\u0000\u010c\u010d\u00050\u0000\u0000\u010d\u010e"+ + "\u0003\n\u0005\u0000\u010e\u010f\u00057\u0000\u0000\u010f\u0111\u0001"+ + "\u0000\u0000\u0000\u0110\u0108\u0001\u0000\u0000\u0000\u0110\u010a\u0001"+ + "\u0000\u0000\u0000\u0110\u010b\u0001\u0000\u0000\u0000\u0110\u010c\u0001"+ + "\u0000\u0000\u0000\u0111\u0117\u0001\u0000\u0000\u0000\u0112\u0113\n\u0001"+ + "\u0000\u0000\u0113\u0114\u0005&\u0000\u0000\u0114\u0116\u0003\u001a\r"+ + "\u0000\u0115\u0112\u0001\u0000\u0000\u0000\u0116\u0119\u0001\u0000\u0000"+ + 
"\u0000\u0117\u0115\u0001\u0000\u0000\u0000\u0117\u0118\u0001\u0000\u0000"+ + "\u0000\u0118\u0015\u0001\u0000\u0000\u0000\u0119\u0117\u0001\u0000\u0000"+ + "\u0000\u011a\u011b\u0003\u0018\f\u0000\u011b\u0125\u00050\u0000\u0000"+ + "\u011c\u0126\u0005B\u0000\u0000\u011d\u0122\u0003\n\u0005\u0000\u011e"+ + "\u011f\u0005\'\u0000\u0000\u011f\u0121\u0003\n\u0005\u0000\u0120\u011e"+ + "\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000\u0122\u0120"+ + "\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000\u0000\u0123\u0126"+ + "\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000\u0000\u0125\u011c"+ + "\u0001\u0000\u0000\u0000\u0125\u011d\u0001\u0000\u0000\u0000\u0125\u0126"+ + "\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0128"+ + "\u00057\u0000\u0000\u0128\u0017\u0001\u0000\u0000\u0000\u0129\u012a\u0003"+ + "H$\u0000\u012a\u0019\u0001\u0000\u0000\u0000\u012b\u012c\u0003@ \u0000"+ + "\u012c\u001b\u0001\u0000\u0000\u0000\u012d\u012e\u0005\f\u0000\u0000\u012e"+ + "\u012f\u0003\u001e\u000f\u0000\u012f\u001d\u0001\u0000\u0000\u0000\u0130"+ + "\u0135\u0003 \u0010\u0000\u0131\u0132\u0005\'\u0000\u0000\u0132\u0134"+ + "\u0003 \u0010\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0137\u0001"+ + "\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0135\u0136\u0001"+ + "\u0000\u0000\u0000\u0136\u001f\u0001\u0000\u0000\u0000\u0137\u0135\u0001"+ + "\u0000\u0000\u0000\u0138\u0139\u0003:\u001d\u0000\u0139\u013a\u0005%\u0000"+ + "\u0000\u013a\u013c\u0001\u0000\u0000\u0000\u013b\u0138\u0001\u0000\u0000"+ + "\u0000\u013b\u013c\u0001\u0000\u0000\u0000\u013c\u013d\u0001\u0000\u0000"+ + "\u0000\u013d\u013e\u0003\n\u0005\u0000\u013e!\u0001\u0000\u0000\u0000"+ + "\u013f\u0140\u0005\u0006\u0000\u0000\u0140\u0145\u0003$\u0012\u0000\u0141"+ + "\u0142\u0005\'\u0000\u0000\u0142\u0144\u0003$\u0012\u0000\u0143\u0141"+ + "\u0001\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143"+ + "\u0001\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0149"+ + "\u0001\u0000\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u014a"+ + "\u0003*\u0015\u0000\u0149\u0148\u0001\u0000\u0000\u0000\u0149\u014a\u0001"+ + "\u0000\u0000\u0000\u014a#\u0001\u0000\u0000\u0000\u014b\u014c\u0003&\u0013"+ + "\u0000\u014c\u014d\u0005\u001d\u0000\u0000\u014d\u014f\u0001\u0000\u0000"+ + "\u0000\u014e\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000\u0000"+ + "\u0000\u014f\u0150\u0001\u0000\u0000\u0000\u0150\u0151\u0003(\u0014\u0000"+ + "\u0151%\u0001\u0000\u0000\u0000\u0152\u0153\u0005Q\u0000\u0000\u0153\'"+ + "\u0001\u0000\u0000\u0000\u0154\u0155\u0007\u0002\u0000\u0000\u0155)\u0001"+ + "\u0000\u0000\u0000\u0156\u0159\u0003,\u0016\u0000\u0157\u0159\u0003.\u0017"+ + "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0157\u0001\u0000\u0000"+ + "\u0000\u0159+\u0001\u0000\u0000\u0000\u015a\u015b\u0005P\u0000\u0000\u015b"+ + "\u0160\u0005Q\u0000\u0000\u015c\u015d\u0005\'\u0000\u0000\u015d\u015f"+ + "\u0005Q\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0162\u0001"+ + "\u0000\u0000\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0160\u0161\u0001"+ + "\u0000\u0000\u0000\u0161-\u0001\u0000\u0000\u0000\u0162\u0160\u0001\u0000"+ + "\u0000\u0000\u0163\u0164\u0005F\u0000\u0000\u0164\u0165\u0003,\u0016\u0000"+ + "\u0165\u0166\u0005G\u0000\u0000\u0166/\u0001\u0000\u0000\u0000\u0167\u0168"+ + "\u0005\u0013\u0000\u0000\u0168\u016d\u0003$\u0012\u0000\u0169\u016a\u0005"+ + "\'\u0000\u0000\u016a\u016c\u0003$\u0012\u0000\u016b\u0169\u0001\u0000"+ + 
"\u0000\u0000\u016c\u016f\u0001\u0000\u0000\u0000\u016d\u016b\u0001\u0000"+ + "\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000\u016e\u0171\u0001\u0000"+ + "\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0172\u00036\u001b"+ + "\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ + "\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\"\u0000\u0000"+ + "\u0174\u0176\u0003\u001e\u000f\u0000\u0175\u0173\u0001\u0000\u0000\u0000"+ + "\u0175\u0176\u0001\u0000\u0000\u0000\u01761\u0001\u0000\u0000\u0000\u0177"+ + "\u0178\u0005\u0004\u0000\u0000\u0178\u0179\u0003\u001e\u000f\u0000\u0179"+ + "3\u0001\u0000\u0000\u0000\u017a\u017c\u0005\u000f\u0000\u0000\u017b\u017d"+ + "\u00036\u001b\u0000\u017c\u017b\u0001\u0000\u0000\u0000\u017c\u017d\u0001"+ + "\u0000\u0000\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ + "\"\u0000\u0000\u017f\u0181\u0003\u001e\u000f\u0000\u0180\u017e\u0001\u0000"+ + "\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u01815\u0001\u0000\u0000"+ + "\u0000\u0182\u0187\u00038\u001c\u0000\u0183\u0184\u0005\'\u0000\u0000"+ + "\u0184\u0186\u00038\u001c\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186"+ + "\u0189\u0001\u0000\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187"+ + "\u0188\u0001\u0000\u0000\u0000\u01887\u0001\u0000\u0000\u0000\u0189\u0187"+ + "\u0001\u0000\u0000\u0000\u018a\u018d\u0003 \u0010\u0000\u018b\u018c\u0005"+ + "\u0010\u0000\u0000\u018c\u018e\u0003\n\u0005\u0000\u018d\u018b\u0001\u0000"+ + "\u0000\u0000\u018d\u018e\u0001\u0000\u0000\u0000\u018e9\u0001\u0000\u0000"+ + "\u0000\u018f\u0194\u0003H$\u0000\u0190\u0191\u0005)\u0000\u0000\u0191"+ + "\u0193\u0003H$\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001"+ + "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001"+ + "\u0000\u0000\u0000\u0195;\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000"+ + "\u0000\u0000\u0197\u019c\u0003B!\u0000\u0198\u0199\u0005)\u0000\u0000"+ + "\u0199\u019b\u0003B!\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ + "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ + "\u0001\u0000\u0000\u0000\u019d=\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ + "\u0000\u0000\u0000\u019f\u01a4\u0003<\u001e\u0000\u01a0\u01a1\u0005\'"+ + "\u0000\u0000\u01a1\u01a3\u0003<\u001e\u0000\u01a2\u01a0\u0001\u0000\u0000"+ + "\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000"+ + "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5?\u0001\u0000\u0000\u0000"+ + "\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0003\u0000\u0000"+ + "\u01a8A\u0001\u0000\u0000\u0000\u01a9\u01ad\u0005U\u0000\u0000\u01aa\u01ab"+ + "\u0004!\u000b\u0000\u01ab\u01ad\u0003F#\u0000\u01ac\u01a9\u0001\u0000"+ + "\u0000\u0000\u01ac\u01aa\u0001\u0000\u0000\u0000\u01adC\u0001\u0000\u0000"+ + "\u0000\u01ae\u01d9\u00052\u0000\u0000\u01af\u01b0\u0003h4\u0000\u01b0"+ + "\u01b1\u0005H\u0000\u0000\u01b1\u01d9\u0001\u0000\u0000\u0000\u01b2\u01d9"+ + "\u0003f3\u0000\u01b3\u01d9\u0003h4\u0000\u01b4\u01d9\u0003b1\u0000\u01b5"+ + "\u01d9\u0003F#\u0000\u01b6\u01d9\u0003j5\u0000\u01b7\u01b8\u0005F\u0000"+ + "\u0000\u01b8\u01bd\u0003d2\u0000\u01b9\u01ba\u0005\'\u0000\u0000\u01ba"+ + "\u01bc\u0003d2\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001"+ + "\u0000\u0000\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001"+ + "\u0000\u0000\u0000\u01be\u01c0\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001"+ + "\u0000\u0000\u0000\u01c0\u01c1\u0005G\u0000\u0000\u01c1\u01d9\u0001\u0000"+ + 
"\u0000\u0000\u01c2\u01c3\u0005F\u0000\u0000\u01c3\u01c8\u0003b1\u0000"+ + "\u01c4\u01c5\u0005\'\u0000\u0000\u01c5\u01c7\u0003b1\u0000\u01c6\u01c4"+ + "\u0001\u0000\u0000\u0000\u01c7\u01ca\u0001\u0000\u0000\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01cb"+ + "\u0001\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01cc"+ + "\u0005G\u0000\u0000\u01cc\u01d9\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ + "F\u0000\u0000\u01ce\u01d3\u0003j5\u0000\u01cf\u01d0\u0005\'\u0000\u0000"+ + "\u01d0\u01d2\u0003j5\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d2\u01d5"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d4"+ + "\u0001\u0000\u0000\u0000\u01d4\u01d6\u0001\u0000\u0000\u0000\u01d5\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005G\u0000\u0000\u01d7\u01d9\u0001"+ + "\u0000\u0000\u0000\u01d8\u01ae\u0001\u0000\u0000\u0000\u01d8\u01af\u0001"+ + "\u0000\u0000\u0000\u01d8\u01b2\u0001\u0000\u0000\u0000\u01d8\u01b3\u0001"+ + "\u0000\u0000\u0000\u01d8\u01b4\u0001\u0000\u0000\u0000\u01d8\u01b5\u0001"+ + "\u0000\u0000\u0000\u01d8\u01b6\u0001\u0000\u0000\u0000\u01d8\u01b7\u0001"+ + "\u0000\u0000\u0000\u01d8\u01c2\u0001\u0000\u0000\u0000\u01d8\u01cd\u0001"+ + "\u0000\u0000\u0000\u01d9E\u0001\u0000\u0000\u0000\u01da\u01dd\u00055\u0000"+ + "\u0000\u01db\u01dd\u0005E\u0000\u0000\u01dc\u01da\u0001\u0000\u0000\u0000"+ + "\u01dc\u01db\u0001\u0000\u0000\u0000\u01ddG\u0001\u0000\u0000\u0000\u01de"+ + "\u01e2\u0003@ \u0000\u01df\u01e0\u0004$\f\u0000\u01e0\u01e2\u0003F#\u0000"+ + "\u01e1\u01de\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000"+ + "\u01e2I\u0001\u0000\u0000\u0000\u01e3\u01e4\u0005\t\u0000\u0000\u01e4"+ + "\u01e5\u0005 \u0000\u0000\u01e5K\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "\u000e\u0000\u0000\u01e7\u01ec\u0003N\'\u0000\u01e8\u01e9\u0005\'\u0000"+ + "\u0000\u01e9\u01eb\u0003N\'\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000"+ + "\u01eb\u01ee\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000"+ + "\u01ec\u01ed\u0001\u0000\u0000\u0000\u01edM\u0001\u0000\u0000\u0000\u01ee"+ + "\u01ec\u0001\u0000\u0000\u0000\u01ef\u01f1\u0003\n\u0005\u0000\u01f0\u01f2"+ + "\u0007\u0004\u0000\u0000\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2"+ + "\u0001\u0000\u0000\u0000\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f4"+ + "\u00053\u0000\u0000\u01f4\u01f6\u0007\u0005\u0000\u0000\u01f5\u01f3\u0001"+ + "\u0000\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6O\u0001\u0000"+ + "\u0000\u0000\u01f7\u01f8\u0005\b\u0000\u0000\u01f8\u01f9\u0003>\u001f"+ + "\u0000\u01f9Q\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005\u0002\u0000\u0000"+ + "\u01fb\u01fc\u0003>\u001f\u0000\u01fcS\u0001\u0000\u0000\u0000\u01fd\u01fe"+ + "\u0005\u000b\u0000\u0000\u01fe\u0203\u0003V+\u0000\u01ff\u0200\u0005\'"+ + "\u0000\u0000\u0200\u0202\u0003V+\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ + "\u0202\u0205\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000\u0000\u0000"+ + "\u0203\u0204\u0001\u0000\u0000\u0000\u0204U\u0001\u0000\u0000\u0000\u0205"+ + "\u0203\u0001\u0000\u0000\u0000\u0206\u0207\u0003<\u001e\u0000\u0207\u0208"+ + "\u0005Y\u0000\u0000\u0208\u0209\u0003<\u001e\u0000\u0209W\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0005\u0001\u0000\u0000\u020b\u020c\u0003\u0014"+ + "\n\u0000\u020c\u020e\u0003j5\u0000\u020d\u020f\u0003^/\u0000\u020e\u020d"+ + "\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020fY\u0001"+ + "\u0000\u0000\u0000\u0210\u0211\u0005\u0007\u0000\u0000\u0211\u0212\u0003"+ + 
"\u0014\n\u0000\u0212\u0213\u0003j5\u0000\u0213[\u0001\u0000\u0000\u0000"+ + "\u0214\u0215\u0005\n\u0000\u0000\u0215\u0216\u0003:\u001d\u0000\u0216"+ + "]\u0001\u0000\u0000\u0000\u0217\u021c\u0003`0\u0000\u0218\u0219\u0005"+ + "\'\u0000\u0000\u0219\u021b\u0003`0\u0000\u021a\u0218\u0001\u0000\u0000"+ + "\u0000\u021b\u021e\u0001\u0000\u0000\u0000\u021c\u021a\u0001\u0000\u0000"+ + "\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d_\u0001\u0000\u0000\u0000"+ + "\u021e\u021c\u0001\u0000\u0000\u0000\u021f\u0220\u0003@ \u0000\u0220\u0221"+ + "\u0005%\u0000\u0000\u0221\u0222\u0003D\"\u0000\u0222a\u0001\u0000\u0000"+ + "\u0000\u0223\u0224\u0007\u0006\u0000\u0000\u0224c\u0001\u0000\u0000\u0000"+ + "\u0225\u0228\u0003f3\u0000\u0226\u0228\u0003h4\u0000\u0227\u0225\u0001"+ + "\u0000\u0000\u0000\u0227\u0226\u0001\u0000\u0000\u0000\u0228e\u0001\u0000"+ + "\u0000\u0000\u0229\u022b\u0007\u0000\u0000\u0000\u022a\u0229\u0001\u0000"+ + "\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000"+ + "\u0000\u0000\u022c\u022d\u0005!\u0000\u0000\u022dg\u0001\u0000\u0000\u0000"+ + "\u022e\u0230\u0007\u0000\u0000\u0000\u022f\u022e\u0001\u0000\u0000\u0000"+ + "\u022f\u0230\u0001\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000"+ + "\u0231\u0232\u0005 \u0000\u0000\u0232i\u0001\u0000\u0000\u0000\u0233\u0234"+ + "\u0005\u001f\u0000\u0000\u0234k\u0001\u0000\u0000\u0000\u0235\u0236\u0007"+ + "\u0007\u0000\u0000\u0236m\u0001\u0000\u0000\u0000\u0237\u0238\u0005\u0005"+ + "\u0000\u0000\u0238\u0239\u0003p8\u0000\u0239o\u0001\u0000\u0000\u0000"+ + "\u023a\u023b\u0005F\u0000\u0000\u023b\u023c\u0003\u0002\u0001\u0000\u023c"+ + "\u023d\u0005G\u0000\u0000\u023dq\u0001\u0000\u0000\u0000\u023e\u023f\u0005"+ + "\r\u0000\u0000\u023f\u0240\u0005i\u0000\u0000\u0240s\u0001\u0000\u0000"+ + "\u0000\u0241\u0242\u0005\u0003\u0000\u0000\u0242\u0245\u0005_\u0000\u0000"+ + "\u0243\u0244\u0005]\u0000\u0000\u0244\u0246\u0003<\u001e\u0000\u0245\u0243"+ + "\u0001\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0250"+ + "\u0001\u0000\u0000\u0000\u0247\u0248\u0005^\u0000\u0000\u0248\u024d\u0003"+ + "v;\u0000\u0249\u024a\u0005\'\u0000\u0000\u024a\u024c\u0003v;\u0000\u024b"+ + "\u0249\u0001\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d"+ + "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ + "\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250"+ + "\u0247\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0251"+ + "u\u0001\u0000\u0000\u0000\u0252\u0253\u0003<\u001e\u0000\u0253\u0254\u0005"+ + "%\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0252\u0001\u0000"+ + "\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000"+ + "\u0000\u0000\u0257\u0258\u0003<\u001e\u0000\u0258w\u0001\u0000\u0000\u0000"+ + "\u0259\u025a\u0005\u0012\u0000\u0000\u025a\u025b\u0003$\u0012\u0000\u025b"+ + "\u025c\u0005]\u0000\u0000\u025c\u025d\u0003>\u001f\u0000\u025dy\u0001"+ + "\u0000\u0000\u0000\u025e\u025f\u0005\u0011\u0000\u0000\u025f\u0262\u0003"+ + "6\u001b\u0000\u0260\u0261\u0005\"\u0000\u0000\u0261\u0263\u0003\u001e"+ + "\u000f\u0000\u0262\u0260\u0001\u0000\u0000\u0000\u0262\u0263\u0001\u0000"+ + "\u0000\u0000\u0263{\u0001\u0000\u0000\u0000\u0264\u0266\u0007\b\u0000"+ + "\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000"+ + "\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0014\u0000"+ + "\u0000\u0268\u0269\u0003~?\u0000\u0269\u026a\u0003\u0080@\u0000\u026a"+ + "}\u0001\u0000\u0000\u0000\u026b\u026e\u0003@ 
\u0000\u026c\u026d\u0005"+ + "Y\u0000\u0000\u026d\u026f\u0003@ \u0000\u026e\u026c\u0001\u0000\u0000"+ + "\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u007f\u0001\u0000\u0000"+ + "\u0000\u0270\u0271\u0005]\u0000\u0000\u0271\u0276\u0003\u0082A\u0000\u0272"+ + "\u0273\u0005\'\u0000\u0000\u0273\u0275\u0003\u0082A\u0000\u0274\u0272"+ + "\u0001\u0000\u0000\u0000\u0275\u0278\u0001\u0000\u0000\u0000\u0276\u0274"+ + "\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0081"+ + "\u0001\u0000\u0000\u0000\u0278\u0276\u0001\u0000\u0000\u0000\u0279\u027a"+ + "\u0003\u0010\b\u0000\u027a\u0083\u0001\u0000\u0000\u0000=\u008f\u0098"+ + "\u00ac\u00b8\u00c1\u00c9\u00cf\u00d7\u00d9\u00de\u00e5\u00ea\u00f5\u00fb"+ + "\u0103\u0105\u0110\u0117\u0122\u0125\u0135\u013b\u0145\u0149\u014e\u0158"+ + "\u0160\u016d\u0171\u0175\u017c\u0180\u0187\u018d\u0194\u019c\u01a4\u01ac"+ + "\u01bd\u01c8\u01d3\u01d8\u01dc\u01e1\u01ec\u01f1\u01f5\u0203\u020e\u021c"+ + "\u0227\u022a\u022f\u0245\u024d\u0250\u0255\u0262\u0265\u026e\u0276"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 556a97657635a..6071219839bab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -1052,6 +1052,54 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

 	 * <p>The default implementation does nothing.</p>
 	 */
 	@Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
index 56b6999615f50..afe7146923791 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
@@ -622,4 +622,32 @@ public class EsqlBaseParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> im
 	 * {@link #visitChildren} on {@code ctx}.</p>

    */ @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index cf658c4a73141..0faca2541c9ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -941,4 +941,44 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 86c1d1aafc33a..e91cd6670e971 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -567,4 +567,28 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinPredicate}. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
index f83af534eaa72..99e03b3653f79 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
@@ -53,6 +53,7 @@
 import org.elasticsearch.xpack.esql.plan.logical.Rename;
 import org.elasticsearch.xpack.esql.plan.logical.Row;
 import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation;
+import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin;
 import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo;
 import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
 import org.joni.exception.SyntaxException;
@@ -68,6 +69,7 @@
 import java.util.Set;
 import java.util.function.Function;
 
+import static java.util.Collections.emptyList;
 import static org.elasticsearch.common.logging.HeaderWarning.addWarning;
 import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD;
 import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions;
@@ -502,7 +504,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx)
     @Override
     public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) {
         if (false == Build.current().isSnapshot()) {
-            throw new ParsingException(source(ctx), "LOOKUP is in preview and only available in SNAPSHOT build");
+            throw new ParsingException(source(ctx), "LOOKUP__ is in preview and only available in SNAPSHOT build");
         }
 
         var source = source(ctx);
@@ -524,4 +526,42 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) {
         return p -> new Lookup(source, p, tableName, matchFields, null /* localRelation will be resolved later*/);
     }
 
+    public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) {
+        var source = source(ctx);
+        if (false == Build.current().isSnapshot()) {
+            throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build");
+        }
+
+        if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) {
+            String joinType = ctx.type == null ? "(INNER)" : ctx.type.getText();
+            throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType);
+        }
+
+        var target = ctx.joinTarget();
+        UnresolvedRelation right = new UnresolvedRelation(
+            source(target),
+            new TableIdentifier(source(target.index), null, visitIdentifier(target.index)),
+            false,
+            emptyList(),
+            IndexMode.LOOKUP,
+            null,
+            "???"
+        );
+
+        var condition = ctx.joinCondition();
+
+        // ON only with qualified names
+        var predicates = expressions(condition.joinPredicate());
+        List<Attribute> joinFields = new ArrayList<>(predicates.size());
+        for (var f : predicates) {
+            // verify each field is an unresolved attribute
+            if (f instanceof UnresolvedAttribute ua) {
+                joinFields.add(ua);
+            } else {
+                throw new ParsingException(f.source(), "JOIN ON clause only supports fields at the moment, found [{}]", f.sourceText());
+            }
+        }
+
+        return p -> new LookupJoin(source, p, right, joinFields);
+    }
 }
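
The visitJoinCommand hunk above only accepts plain field names in the ON clause of a query shaped like FROM employees | LOOKUP JOIN languages_lookup ON language_code (the index and field names here are illustrative). Below is a minimal standalone sketch of that validation step; the types (Expression, UnresolvedAttribute, Literal) are hypothetical stand-ins, not the actual ES|QL expression classes.

    import java.util.ArrayList;
    import java.util.List;

    interface Expression {}

    record UnresolvedAttribute(String name) implements Expression {}

    record Literal(Object value) implements Expression {}

    class JoinOnValidation {
        // Mirrors the loop in visitJoinCommand: keep bare fields, reject everything else.
        static List<UnresolvedAttribute> joinFields(List<Expression> predicates) {
            List<UnresolvedAttribute> fields = new ArrayList<>(predicates.size());
            for (Expression f : predicates) {
                if (f instanceof UnresolvedAttribute ua) {
                    fields.add(ua);
                } else {
                    throw new IllegalArgumentException("JOIN ON clause only supports fields, found: " + f);
                }
            }
            return fields;
        }

        public static void main(String[] args) {
            // A bare field is accepted; a Literal in the same list would throw.
            System.out.println(joinFields(List.of(new UnresolvedAttribute("language_code"))));
        }
    }
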
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java
index e34e0b8e27863..ef8c3983faf2e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java
@@ -59,12 +59,17 @@ public AttributeSet inputSet() {
      */
     public List<Expression> expressions() {
         if (lazyExpressions == null) {
-            lazyExpressions = new ArrayList<>();
-            forEachPropertyOnly(Object.class, e -> doForEachExpression(e, lazyExpressions::add));
+            lazyExpressions = computeExpressions();
         }
         return lazyExpressions;
     }
 
+    protected List<Expression> computeExpressions() {
+        List<Expression> expressions = new ArrayList<>();
+        forEachPropertyOnly(Object.class, e -> doForEachExpression(e, expressions::add));
+        return expressions;
+    }
+
     /**
      * The attributes required to be in the {@link QueryPlan#inputSet()} for this plan to be valid.
      * Excludes generated references.
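
The QueryPlan change above separates the lazy cache in expressions() from the collection logic, now in the overridable computeExpressions() hook, so plan nodes such as the new joins can customize how expressions are gathered without touching the caching. A rough sketch of that template-method shape, with String standing in for Expression:

    import java.util.List;

    abstract class Plan {
        private List<String> lazyExpressions;

        // Computed once, then served from the cache.
        public List<String> expressions() {
            if (lazyExpressions == null) {
                lazyExpressions = computeExpressions();
            }
            return lazyExpressions;
        }

        // Hook point: subclasses override the collection, not the caching.
        protected List<String> computeExpressions() {
            return List.of("default");
        }
    }

    class JoinLikePlan extends Plan {
        @Override
        protected List<String> computeExpressions() {
            return List.of("left-key", "right-key");
        }
    }

    class Demo {
        public static void main(String[] args) {
            System.out.println(new JoinLikePlan().expressions()); // [left-key, right-key]
        }
    }
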
"[" + indexMode.name() + "]" : "") + + NodeUtils.limitedToString(attrs); } public static IndexMode readIndexMode(StreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 9e854450a2d34..4211f8a0d45b6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.ArrayList; @@ -118,7 +118,7 @@ private JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, namedGroupings, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, namedGroupings, leftFields, rightFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 70f8a24cfc87e..6e7f421003292 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.io.IOException; @@ -114,7 +114,7 @@ public JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, matchFields, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0dfbe4936e4e3..384c3f7a340ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -25,6 +25,10 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final TableIdentifier table; private final boolean frozen; private final List metadataFields; + /* + * Expected indexMode based on the declaration - used later for verification + * at resolution time. 
+     */
     private final IndexMode indexMode;
     private final String unresolvedMsg;

diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
index f9be61ed2c8d7..0e182646d914a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
@@ -10,9 +10,8 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
-import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
-import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -23,12 +22,12 @@
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
-import java.util.Set;

-import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes;
+import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT;
+import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.RIGHT;

 public class Join extends BinaryPlan {

     public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new);
@@ -92,11 +91,6 @@ protected NodeInfo<Join> info() {
         );
     }

-    @Override
-    public Join replaceChildren(LogicalPlan left, LogicalPlan right) {
-        return new Join(source(), left, right, config);
-    }
-
     @Override
     public List<Attribute> output() {
         if (lazyOutput == null) {
@@ -106,35 +100,42 @@ public List<Attribute> output() {
     }

     /**
-     * Merge output fields.
-     * Currently only implemented for LEFT JOINs; the rightOutput shadows the leftOutput, except for any attributes that
-     * occur in the join's matchFields.
+     * Combine the two lists of attributes into one.
+     * In case of (name) conflicts, the join type specifies which side wins, that is, which side's column overrides the other - the left
+     * or the right.
     */
    public static List<Attribute> computeOutput(List<Attribute> leftOutput, List<Attribute> rightOutput, JoinConfig config) {
-        AttributeSet matchFieldSet = new AttributeSet(config.matchFields());
-        Set<String> matchFieldNames = new HashSet<>(Expressions.names(config.matchFields()));
-        return switch (config.type()) {
-            case LEFT -> {
-                // Right side becomes nullable.
-                List<Attribute> fieldsAddedFromRight = removeCollisionsWithMatchFields(rightOutput, matchFieldSet, matchFieldNames);
-                yield mergeOutputAttributes(fieldsAddedFromRight, leftOutput);
-            }
-            default -> throw new UnsupportedOperationException("Other JOINs than LEFT not supported");
-        };
+        JoinType joinType = config.type();
+        List<Attribute> output;
+        // TODO: make the other side nullable
+        if (LEFT.equals(joinType)) {
+            // right side becomes nullable and overrides left
+            // output = merge(leftOutput, makeNullable(rightOutput));
+            output = merge(leftOutput, rightOutput);
+        } else if (RIGHT.equals(joinType)) {
+            // left side becomes nullable and overrides right
+            // output = merge(makeNullable(leftOutput), rightOutput);
+            output = merge(leftOutput, rightOutput);
+        } else {
+            throw new IllegalArgumentException(joinType.joinName() + " unsupported");
+        }
+        return output;
     }

-    private static List<Attribute> removeCollisionsWithMatchFields(
-        List<Attribute> attributes,
-        AttributeSet matchFields,
-        Set<String> matchFieldNames
-    ) {
-        List<Attribute> result = new ArrayList<>();
-        for (Attribute attr : attributes) {
-            if ((matchFields.contains(attr) || matchFieldNames.contains(attr.name())) == false) {
-                result.add(attr);
-            }
+    /**
+     * Merge the two lists of attributes into one, preserving order.
+     */
+    private static List<Attribute> merge(List<Attribute> left, List<Attribute> right) {
+        // use linked hash map to preserve order
+        Map<String, Attribute> nameToAttribute = Maps.newLinkedHashMapWithExpectedSize(left.size() + right.size());
+        for (Attribute a : left) {
+            nameToAttribute.put(a.name(), a);
+        }
+        for (Attribute a : right) {
+            // override the existing entry in place
+            nameToAttribute.compute(a.name(), (name, existing) -> a);
         }
-        return result;
+
+        return new ArrayList<>(nameToAttribute.values());
     }

     /**
@@ -160,7 +161,7 @@ public static List<Attribute> makeReference(List<Attribute> output) {
         return out;
     }

-    public static List<Attribute> makeNullable(List<Attribute> output) {
+    private static List<Attribute> makeNullable(List<Attribute> output) {
         List<Attribute> out = new ArrayList<>(output.size());
         for (Attribute a : output) {
             out.add(a.withNullability(Nullability.TRUE));
@@ -181,6 +182,15 @@ public boolean resolved() {
         return childrenResolved() && expressionsResolved();
     }

+    public Join withConfig(JoinConfig config) {
+        return new Join(source(), left(), right(), config);
+    }
+
+    @Override
+    public Join replaceChildren(LogicalPlan left, LogicalPlan right) {
+        return new Join(source(), left, right, config);
+    }
+
     @Override
     public String commandName() {
         return "JOIN";
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
index 68ad50f2f67a0..383606d6ccbed 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
@@ -22,12 +22,16 @@
  * @param leftFields matched with the right fields
  * @param rightFields matched with the left fields
  */
+// TODO: this class needs refactoring into a more general form (expressions) since it currently contains
+// both the condition (equi-join) between the left and right field as well as the output of the join keys,
+// which makes sense only for the USING clause - which is better resolved in the analyzer (based on the names);
+// hence, for now, the attributes are set inside the analyzer
 public record JoinConfig(JoinType type, List<Attribute> matchFields, List<Attribute> leftFields, List<Attribute> rightFields)
     implements
         Writeable {

    public JoinConfig(StreamInput in) throws IOException {
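For intuition, here is a minimal, self-contained sketch of the merge semantics introduced above (a toy record stands in for the real Attribute class; names and values are invented for illustration): the right-hand list overrides same-named entries in place, and the LinkedHashMap keeps the encounter order stable.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MergeSketch {
    // Toy stand-in for Attribute, identified by name only.
    record Attr(String name, String origin) {}

    // Same contract as Join#merge above: right overrides left, order is preserved.
    static List<Attr> merge(List<Attr> left, List<Attr> right) {
        Map<String, Attr> byName = new LinkedHashMap<>();
        for (Attr a : left) {
            byName.put(a.name(), a);
        }
        for (Attr a : right) {
            // compute() replaces the value but keeps the key's original position
            byName.compute(a.name(), (name, existing) -> a);
        }
        return new ArrayList<>(byName.values());
    }

    public static void main(String[] args) {
        List<Attr> left = List.of(new Attr("emp_no", "left"), new Attr("language", "left"));
        List<Attr> right = List.of(new Attr("language", "right"), new Attr("language_name", "right"));
        // prints emp_no from the left, then language and language_name from the right,
        // with language staying in its left-hand position
        System.out.println(merge(left, right));
    }
}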
this( - JoinType.readFrom(in), + JoinTypes.readFrom(in), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class) @@ -43,6 +47,9 @@ public void writeTo(StreamOutput out) throws IOException { } public boolean expressionsResolved() { - return Resolvables.resolved(matchFields) && Resolvables.resolved(leftFields) && Resolvables.resolved(rightFields); + return type.resolved() + && Resolvables.resolved(matchFields) + && Resolvables.resolved(leftFields) + && Resolvables.resolved(rightFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java index c3095efc9e623..a309387b1f0a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java @@ -7,46 +7,15 @@ package org.elasticsearch.xpack.esql.plan.logical.join; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import java.io.IOException; +public interface JoinType extends Writeable { -public enum JoinType implements Writeable { - INNER(0, "INNER"), - LEFT(1, "LEFT OUTER"), - RIGHT(2, "RIGHT OUTER"), - FULL(3, "FULL OUTER"), - CROSS(4, "CROSS"); - - private final byte id; - private final String name; - - JoinType(int id, String name) { - this.id = (byte) id; - this.name = name; - } - - @Override - public String toString() { - return name; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeByte(id); + default String joinName() { + return getClass().getSimpleName(); } - public static JoinType readFrom(StreamInput in) throws IOException { - byte id = in.readByte(); - return switch (id) { - case 0 -> INNER; - case 1 -> LEFT; - case 2 -> RIGHT; - case 3 -> FULL; - case 4 -> CROSS; - default -> throw new IllegalArgumentException("unsupported join [" + id + "]"); - }; + default boolean resolved() { + return true; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java new file mode 100644 index 0000000000000..9d3471bc356f7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Attribute; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Utility class defining the concrete types of joins supported by ESQL. 
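The reason expressionsResolved() now also consults type.resolved() is the USING variant defined below, which carries (possibly unresolved) columns. A sketch, assuming UnresolvedAttribute's two-argument (Source, name) constructor used elsewhere in this codebase:

import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType;

import java.util.List;

class JoinTypeResolutionSketch {
    static void demo() {
        // core types carry no expressions, so they are always resolved
        assert JoinTypes.LEFT.resolved();

        // a USING join type stays unresolved until the analyzer resolves its columns,
        // and so does any JoinConfig built around it
        UsingJoinType using = new UsingJoinType(JoinTypes.LEFT, List.of(new UnresolvedAttribute(Source.EMPTY, "language_code")));
        assert using.resolved() == false;
    }
}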
+ */
+public class JoinTypes {
+
+    private JoinTypes() {}
+
+    public static JoinType INNER = CoreJoinType.INNER;
+    public static JoinType LEFT = CoreJoinType.LEFT;
+    public static JoinType RIGHT = CoreJoinType.RIGHT;
+    public static JoinType FULL = CoreJoinType.FULL;
+    public static JoinType CROSS = CoreJoinType.CROSS;
+
+    private static Map<Byte, JoinType> JOIN_TYPES;
+
+    static {
+        CoreJoinType[] types = CoreJoinType.values();
+        JOIN_TYPES = Maps.newMapWithExpectedSize(types.length);
+        for (CoreJoinType type : types) {
+            JOIN_TYPES.put(type.id, type);
+        }
+    }
+
+    /**
+     * The predefined core join types. Implemented as an enum for easy comparison and serialization.
+     */
+    private enum CoreJoinType implements JoinType {
+        INNER(1, "INNER"),
+        LEFT(2, "LEFT OUTER"),
+        RIGHT(3, "RIGHT OUTER"),
+        FULL(4, "FULL OUTER"),
+        CROSS(5, "CROSS");
+
+        private final String name;
+        private final byte id;
+
+        CoreJoinType(int id, String name) {
+            this.id = (byte) id;
+            this.name = name;
+        }
+
+        @Override
+        public String joinName() {
+            return name;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeByte(id);
+        }
+    }
+
+    /**
+     * Join type for the USING clause - shorthand for defining an equi-join (an equality join, meaning the condition checks if columns
+     * across each side of the join are equal).
+     * One important difference is that the USING clause returns the join column only once, at the beginning of the result set.
+     */
+    public static class UsingJoinType implements JoinType {
+        private final List<Attribute> columns;
+        private final JoinType coreJoin;
+
+        public UsingJoinType(JoinType coreJoin, List<Attribute> columns) {
+            this.columns = columns;
+            this.coreJoin = coreJoin;
+        }
+
+        @Override
+        public String joinName() {
+            return coreJoin.joinName() + " USING " + columns.toString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            throw new IllegalArgumentException("USING join type should not be serialized due to being rewritten");
+        }
+
+        public JoinType coreJoin() {
+            return coreJoin;
+        }
+
+        public List<Attribute> columns() {
+            return columns;
+        }
+
+        @Override
+        public boolean resolved() {
+            return Resolvables.resolved(columns);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(columns, coreJoin);
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            UsingJoinType that = (UsingJoinType) o;
+            return Objects.equals(columns, that.columns) && coreJoin == that.coreJoin;
+        }
+
+        @Override
+        public String toString() {
+            return joinName();
+        }
+    }
+
+    /**
+     * Private class so it doesn't get used; it is defined only to showcase why the join type was defined as an interface instead of a
+     * simpler enum.
+     */
+    private abstract static class NaturalJoinType implements JoinType {
+
+        private final JoinType joinType;
+
+        private NaturalJoinType(JoinType joinType) {
+            this.joinType = joinType;
+        }
+
+        @Override
+        public String joinName() {
+            return "NATURAL " + joinType.joinName();
+        }
+    }
+
+    public static JoinType readFrom(StreamInput in) throws IOException {
+        byte id = in.readByte();
+        JoinType type = JOIN_TYPES.get(id);
+        if (type == null) {
+            throw new IllegalArgumentException("unsupported join [" + id + "]");
+        }
+        return type;
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java
new file mode 100644
index 0000000000000..2ee9213f45b36
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plan.logical.join;
+
+import org.elasticsearch.xpack.esql.core.expression.Attribute;
+import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.Project;
+import org.elasticsearch.xpack.esql.plan.logical.SurrogateLogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType;
+
+import java.util.List;
+import java.util.Objects;
+
+import static java.util.Collections.emptyList;
+import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT;
+
+/**
+ * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right.
+ */
+public class LookupJoin extends Join implements SurrogateLogicalPlan {
+
+    private final List<Attribute> output;
+
+    public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List<Attribute> joinFields) {
+        this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList(), emptyList());
+    }
+
+    public LookupJoin(
+        Source source,
+        LogicalPlan left,
+        LogicalPlan right,
+        JoinType type,
+        List<Attribute> joinFields,
+        List<Attribute> leftFields,
+        List<Attribute> rightFields,
+        List<Attribute> output
+    ) {
+        this(source, left, right, new JoinConfig(type, joinFields, leftFields, rightFields), output);
+    }
+
+    public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, JoinConfig joinConfig, List<Attribute> output) {
+        super(source, left, right, joinConfig);
+        this.output = output;
+    }
+
+    /**
+     * Translate the expression into a regular join with a Projection on top, to deal with serialization & co.
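A quick round-trip sketch of the wire contract above (illustrative, not a test from this change; it assumes the usual BytesStreamOutput/StreamInput pairing used in serialization tests):

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;

class JoinTypeWireSketch {
    static void demo() throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            JoinTypes.LEFT.writeTo(out); // writes the single byte id (2)
            try (StreamInput in = out.bytes().streamInput()) {
                JoinType read = JoinTypes.readFrom(in); // looks the id up in JOIN_TYPES
                assert read == JoinTypes.LEFT;
            }
        }
    }
}

A UsingJoinType, by contrast, throws from writeTo, which is why LookupJoin rewrites itself (see surrogate() just below) before the plan is ever serialized.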
+ */ + @Override + public LogicalPlan surrogate() { + JoinConfig cfg = config(); + JoinConfig newConfig = new JoinConfig(LEFT, cfg.matchFields(), cfg.leftFields(), cfg.rightFields()); + Join normalized = new Join(source(), left(), right(), newConfig); + // TODO: decide whether to introduce USING or just basic ON semantics - keep the ordering out for now + return new Project(source(), normalized, output); + } + + public List output() { + return output; + } + + @Override + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new LookupJoin(source(), left, right, config(), output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create( + this, + LookupJoin::new, + left(), + right(), + config().type(), + config().matchFields(), + config().leftFields(), + config().rightFields(), + output + ); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), output); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj) == false) { + return false; + } + + LookupJoin other = (LookupJoin) obj; + return Objects.equals(output, other.output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java new file mode 100644 index 0000000000000..e01451ceaecac --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
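The surrogate mechanism in a nutshell: plans implementing SurrogateLogicalPlan are replaced by the plan they surrogate to before serialization and execution. A sketch of the general shape, assuming the substitution is applied bottom-up with transformUp the way other rewrite rules in this codebase are:

import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.esql.plan.logical.SurrogateLogicalPlan;

class SurrogateSketch {
    // A LookupJoin thus never crosses the wire as-is: it becomes Join + Project.
    static LogicalPlan substituteSurrogates(LogicalPlan plan) {
        return plan.transformUp(p -> p instanceof SurrogateLogicalPlan s ? s.surrogate() : p);
    }
}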
+ */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; + +public class LookupJoinExec extends BinaryExec implements EstimatesRowSize { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + PhysicalPlan.class, + "LookupJoinExec", + LookupJoinExec::new + ); + + private final List matchFields; + private final List leftFields; + private final List rightFields; + private final List output; + private List lazyAddedFields; + + public LookupJoinExec( + Source source, + PhysicalPlan left, + PhysicalPlan lookup, + List matchFields, + List leftFields, + List rightFields, + List output + ) { + super(source, left, lookup); + this.matchFields = matchFields; + this.leftFields = leftFields; + this.rightFields = rightFields; + this.output = output; + } + + private LookupJoinExec(StreamInput in) throws IOException { + super(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readNamedWriteable(PhysicalPlan.class)); + this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeNamedWriteableCollection(matchFields); + out.writeNamedWriteableCollection(leftFields); + out.writeNamedWriteableCollection(rightFields); + out.writeNamedWriteableCollection(output); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public PhysicalPlan lookup() { + return right(); + } + + public List matchFields() { + return matchFields; + } + + public List leftFields() { + return leftFields; + } + + public List rightFields() { + return rightFields; + } + + public List addedFields() { + if (lazyAddedFields == null) { + AttributeSet set = outputSet(); + set.removeAll(left().output()); + for (Attribute m : matchFields) { + set.removeIf(a -> a.name().equals(m.name())); + } + lazyAddedFields = new ArrayList<>(set); + lazyAddedFields.sort(Comparator.comparing(Attribute::name)); + } + return lazyAddedFields; + } + + @Override + public PhysicalPlan estimateRowSize(State state) { + state.add(false, output); + return this; + } + + @Override + public List output() { + return output; + } + + @Override + public AttributeSet inputSet() { + // TODO: this is a hack since the right side is always materialized - instead this should + // return the _doc so the extraction can happen lazily + return left().outputSet(); + } + + @Override + protected AttributeSet computeReferences() { + // TODO: same as above - once lazy materialization of both sides lands, this needs updating + return 
Expressions.references(leftFields); + } + + @Override + public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { + return new LookupJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LookupJoinExec::new, left(), right(), matchFields, leftFields, rightFields, output); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + LookupJoinExec hash = (LookupJoinExec) o; + return matchFields.equals(hash.matchFields) + && leftFields.equals(hash.leftFields) + && rightFields.equals(hash.rightFields) + && output.equals(hash.output); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), matchFields, leftFields, rightFields, output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 1e441826240c9..c181f434368e0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -47,6 +47,7 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.tasks.CancellableTask; @@ -63,6 +64,8 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import org.elasticsearch.xpack.esql.expression.Order; @@ -81,6 +84,7 @@ import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -125,6 +129,7 @@ public class LocalExecutionPlanner { private final ExchangeSourceHandler exchangeSourceHandler; private final ExchangeSinkHandler exchangeSinkHandler; private final EnrichLookupService enrichLookupService; + private final LookupFromIndexService lookupFromIndexService; private final PhysicalOperationProviders physicalOperationProviders; public LocalExecutionPlanner( @@ -138,6 +143,7 @@ public LocalExecutionPlanner( ExchangeSourceHandler exchangeSourceHandler, ExchangeSinkHandler exchangeSinkHandler, EnrichLookupService enrichLookupService, + LookupFromIndexService lookupFromIndexService, PhysicalOperationProviders physicalOperationProviders ) { this.sessionId = sessionId; @@ -149,6 +155,7 @@ public LocalExecutionPlanner( this.exchangeSourceHandler = 
exchangeSourceHandler; this.exchangeSinkHandler = exchangeSinkHandler; this.enrichLookupService = enrichLookupService; + this.lookupFromIndexService = lookupFromIndexService; this.physicalOperationProviders = physicalOperationProviders; this.configuration = configuration; } @@ -225,8 +232,10 @@ else if (node instanceof EsQueryExec esQuery) { // lookups and joins else if (node instanceof EnrichExec enrich) { return planEnrich(enrich, context); - } else if (node instanceof HashJoinExec lookup) { - return planHashJoin(lookup, context); + } else if (node instanceof HashJoinExec join) { + return planHashJoin(join, context); + } else if (node instanceof LookupJoinExec join) { + return planLookupJoin(join, context); } // output else if (node instanceof OutputExec outputExec) { @@ -559,6 +568,55 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC return source.with(new ProjectOperatorFactory(projection), layout); } + private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlannerContext context) { + PhysicalOperation source = plan(join.left(), context); + Layout.Builder layoutBuilder = source.layout.builder(); + for (Attribute f : join.addedFields()) { + layoutBuilder.append(f); + } + Layout layout = layoutBuilder.build(); + + // TODO: this works when the join happens on the coordinator + /* + * But when it happens on the data node we get a + * \_FieldExtractExec[language_code{f}#15, language_name{f}#16]<[]> + * \_EsQueryExec[languages_lookup], indexMode[lookup], query[][_doc{f}#18], limit[], sort[] estimatedRowSize[62] + * Which we'd prefer not to do - at least for now. We already know the fields we're loading + * and don't want any local planning. + */ + EsQueryExec localSourceExec = (EsQueryExec) join.lookup(); + if (localSourceExec.indexMode() != IndexMode.LOOKUP) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + List matchFields = new ArrayList<>(join.matchFields().size()); + for (Attribute m : join.matchFields()) { + Layout.ChannelAndType t = source.layout.get(m.id()); + if (t == null) { + throw new IllegalArgumentException("can't plan [" + join + "][" + m + "]"); + } + matchFields.add(t); + } + if (matchFields.size() != 1) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + + return source.with( + new LookupFromIndexOperator.Factory( + sessionId, + parentTask, + context.queryPragmas().enrichMaxWorkers(), + matchFields.getFirst().channel(), + lookupFromIndexService, + matchFields.getFirst().type(), + localSourceExec.index().name(), + join.matchFields().getFirst().name(), + join.addedFields().stream().map(f -> (NamedExpression) f).toList(), + join.source() + ), + layout + ); + } + private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index ceffae704cff0..fc52f2d5a9d23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner.mapper; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import 
org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -21,11 +22,12 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -98,26 +100,36 @@ private PhysicalPlan mapBinary(BinaryPlan binary) { // special handling for inlinejoin - join + subquery which has to be executed first (async) and replaced by its result if (binary instanceof Join join) { JoinConfig config = join.config(); - if (config.type() != JoinType.LEFT) { + if (config.type() != JoinTypes.LEFT) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } PhysicalPlan left = map(binary.left()); PhysicalPlan right = map(binary.right()); - if (right instanceof LocalSourceExec == false) { - throw new EsqlIllegalArgumentException("right side of a join must be a local source"); + // if the right is data we can use a hash join directly + if (right instanceof LocalSourceExec localData) { + return new HashJoinExec( + join.source(), + left, + localData, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); + } + if (right instanceof EsSourceExec source && source.indexMode() == IndexMode.LOOKUP) { + return new LookupJoinExec( + join.source(), + left, + right, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); } - - return new HashJoinExec( - join.source(), - left, - right, - config.matchFields(), - config.leftFields(), - config.rightFields(), - join.output() - ); } return MapperUtils.unsupported(binary); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index b717af650b7a6..23e6f4fb91d18 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner.mapper; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -23,13 +24,14 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import 
org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -178,7 +180,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) { private PhysicalPlan mapBinary(BinaryPlan bp) { if (bp instanceof Join join) { JoinConfig config = join.config(); - if (config.type() != JoinType.LEFT) { + if (config.type() != JoinTypes.LEFT) { throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]"); } @@ -190,7 +192,7 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { } PhysicalPlan right = map(bp.right()); - // no fragment means lookup + // if the right is data we can use a hash join directly if (right instanceof LocalSourceExec localData) { return new HashJoinExec( join.source(), @@ -202,6 +204,19 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { join.output() ); } + if (right instanceof FragmentExec fragment + && fragment.fragment() instanceof EsRelation relation + && relation.indexMode() == IndexMode.LOOKUP) { + return new LookupJoinExec( + join.source(), + left, + right, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); + } } return MapperUtils.unsupported(bp); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index 213e33f3712b1..ea21943aced9b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -9,7 +9,10 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -27,6 +30,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -39,10 +43,11 @@ import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; 
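Relating the planning pieces above together: LookupJoinExec#addedFields() derives the columns materialized by the lookup as the join output minus the left child's output minus the match keys, sorted by name. A standalone sketch of that set arithmetic, with toy String names instead of Attribute/AttributeSet:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class AddedFieldsSketch {
    static List<String> addedFields(Set<String> output, Set<String> leftOutput, Set<String> matchFields) {
        Set<String> added = new LinkedHashSet<>(output);
        added.removeAll(leftOutput);   // already provided by the left child
        added.removeAll(matchFields);  // join keys are not re-added
        List<String> result = new ArrayList<>(added);
        result.sort(Comparator.naturalOrder());
        return result;
    }

    public static void main(String[] args) {
        // e.g. joining on language_code against a lookup index that provides language_name
        System.out.println(addedFields(
            Set.of("emp_no", "language_code", "language_name"),
            Set.of("emp_no", "language_code"),
            Set.of("language_code")
        )); // [language_name]
    }
}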
+import java.util.ArrayList; import java.util.List; /** @@ -53,7 +58,15 @@ private MapperUtils() {} static PhysicalPlan mapLeaf(LeafPlan p) { if (p instanceof Row row) { - return new RowExec(row.source(), row.fields()); + // return new RowExec(row.source(), row.fields()); + // convert row into local relation + List fields = row.fields(); + List values = new ArrayList<>(fields.size()); + for (Alias field : fields) { + values.add(field.child().fold()); + } + Block[] blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); + p = new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); } if (p instanceof LocalRelation local) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 76de337ded5c6..fc4c057e52ab6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -61,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; @@ -98,6 +99,7 @@ public class ComputeService { private final DriverTaskRunner driverRunner; private final ExchangeService exchangeService; private final EnrichLookupService enrichLookupService; + private final LookupFromIndexService lookupFromIndexService; private final ClusterService clusterService; public ComputeService( @@ -105,6 +107,7 @@ public ComputeService( TransportService transportService, ExchangeService exchangeService, EnrichLookupService enrichLookupService, + LookupFromIndexService lookupFromIndexService, ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, @@ -125,6 +128,7 @@ public ComputeService( this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); this.exchangeService = exchangeService; this.enrichLookupService = enrichLookupService; + this.lookupFromIndexService = lookupFromIndexService; this.clusterService = clusterService; } @@ -429,6 +433,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, context.exchangeSource(), context.exchangeSink(), enrichLookupService, + lookupFromIndexService, new EsPhysicalOperationProviders(contexts) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 04e5fdc4b3bd2..fdc6e06a11032 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -101,6 +101,7 @@ public TransportEsqlQueryAction( transportService, exchangeService, enrichLookupService, + lookupFromIndexService, clusterService, threadPool, bigArrays, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index c576d15f92608..9630a520e8654 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -292,10 +292,10 @@ private void preAnalyze( var unresolvedPolicies = preAnalysis.enriches.stream() .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) .collect(Collectors.toSet()); + final List indices = preAnalysis.indices; + // TODO: make a separate call for lookup indices final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( - preAnalysis.indices.stream() - .flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))) - .toArray(String[]::new) + indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) ).keySet(); enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> { // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 348ca4acd100e..ff0c0d5a5d14c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.index.EsIndex; @@ -253,7 +254,10 @@ public final void test() throws Throwable { "can't use MATCH function in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_FUNCTION.capabilityName()) ); - + assumeFalse( + "lookup join disabled for csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP.capabilityName()) + ); if (Build.current().isSnapshot()) { assertThat( "Capability is not included in the enabled list capabilities on a snapshot build. 
Spelling mistake?", @@ -542,6 +546,7 @@ void executeSubPlan( exchangeSource, exchangeSink, Mockito.mock(EnrichLookupService.class), + Mockito.mock(LookupFromIndexService.class), physicalOperationProviders ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index c1b2adddfc838..a389923afee79 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1909,11 +1909,11 @@ public void testLookup() { String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } LogicalPlan plan = analyze(query); @@ -1945,18 +1945,14 @@ public void testLookup() { .item(startsWith("job{f}")) .item(startsWith("job.raw{f}")) /* - * Int is a reference here because we renamed it in project. - * If we hadn't it'd be a field and that'd be fine. + * Int key is returned as a full field (despite the rename) */ - .item(containsString("int{r}")) + .item(containsString("int{f}")) .item(startsWith("last_name{f}")) .item(startsWith("long_noidx{f}")) .item(startsWith("salary{f}")) /* - * It's important that name is returned as a *reference* here - * instead of a field. If it were a field we'd use SearchStats - * on it and discover that it doesn't exist in the index. It doesn't! - * We don't expect it to. It exists only in the lookup table. + * As is the name column from the right side. 
*/ .item(containsString("name{f}")) ); @@ -1965,11 +1961,11 @@ public void testLookup() { public void testLookupMissingField() { String query = """ FROM test - | LOOKUP int_number_names ON garbage + | LOOKUP_🐔 int_number_names ON garbage """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); @@ -1979,11 +1975,11 @@ public void testLookupMissingField() { public void testLookupMissingTable() { String query = """ FROM test - | LOOKUP garbage ON a + | LOOKUP_🐔 garbage ON a """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); @@ -1994,11 +1990,11 @@ public void testLookupMatchTypeWrong() { String query = """ FROM test | RENAME last_name AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int """; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var e = expectThrows(VerificationException.class, () -> analyze(query)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8b364a603405c..ca50ece7fa08b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -195,13 +195,13 @@ public void testUnsupportedAndMultiTypedFields() { if (EsqlCapabilities.Cap.LOOKUP_V4.isEnabled()) { // LOOKUP with unsupported type assertEquals( - "1:41: column type mismatch, table column was [integer] and original column was [unsupported]", - error("from test* | lookup int_number_names on int", analyzer) + "1:43: column type mismatch, table column was [integer] and original column was [unsupported]", + error("from test* | lookup_🐔 int_number_names on int", analyzer) ); // LOOKUP with multi-typed field assertEquals( - "1:44: column type mismatch, table column was [double] and original column was [unsupported]", - error("from test* | lookup double_number_names on double", analyzer) + "1:46: column type mismatch, table column was [double] and original column was [unsupported]", + error("from test* | lookup_🐔 double_number_names on double", analyzer) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c29f111488f96..2a55379db69d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -111,7 +111,7 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -5624,6 +5624,7 @@ protected List filteredWarnings() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test @@ -5650,7 +5651,7 @@ public void testLookupSimple() { var limit = as(left.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(1000)); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); @@ -5703,6 +5704,7 @@ public void testLookupSimple() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupStats() { String query = """ FROM test @@ -5738,7 +5740,7 @@ public void testLookupStats() { assertThat(left.output().toString(), containsString("int{r}")); as(left.child(), EsRelation.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index eb115ed7b2948..f3ba11457a715 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -93,7 +93,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -114,7 +114,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; 
import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -2751,7 +2750,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); var eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); // Now optimize the plan and assert the same plan again, since no FieldExtractExec is added var optimized = optimizedPlan(plan); @@ -2765,7 +2764,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); } /** @@ -6423,11 +6422,12 @@ public void testMaxQueryDepthPlusExpressionDepth() { assertThat(e.getMessage(), containsString("ESQL statement exceeded the maximum query depth allowed (" + MAX_QUERY_DEPTH + ")")); } + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int"""; + | LOOKUP_🐔 int_number_names ON int"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); @@ -6468,18 +6468,19 @@ public void testLookupSimple() { * \_EsQueryExec[...] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenProject() { String query = """ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } PhysicalPlan plan = optimizedPlan(physicalPlan(query)); @@ -6526,17 +6527,18 @@ public void testLookupThenProject() { * \_LocalRelation[[int{f}#24, name{f}#25],[...]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenTopN() { String query = """ FROM employees | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = physicalPlan(query); @@ -6553,7 +6555,7 @@ public void testLookupThenTopN() { matchesList().item(startsWith("name{f}")).item(startsWith("emp_no{f}")) ); Join join = as(innerTopN.child(), Join.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); 
assertMap(join.config().matchFields().stream().map(Objects::toString).toList(), matchesList().item(startsWith("int{r}"))); Project innerProject = as(join.left(), Project.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 0f46c1f44e8d3..4c1a9228b26e2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -491,25 +491,25 @@ private void clusterAndIndexAsIndexPattern(String command, String clusterAndInde public void testStringAsLookupIndexPattern() { assumeTrue("requires snapshot build", Build.current().isSnapshot()); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"foo\" ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"foo\" ON j"); assertStringAsLookupIndexPattern("test-*", """ - ROW x = 1 | LOOKUP "test-*" ON j + ROW x = 1 | LOOKUP_🐔 "test-*" ON j """); - assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP test-* ON j"); - assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP 123-test@foo_bar+baz1 ON j"); + assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP_🐔 test-* ON j"); + assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP_🐔 123-test@foo_bar+baz1 ON j"); assertStringAsLookupIndexPattern("foo, test-*, abc, xyz", """ - ROW x = 1 | LOOKUP "foo, test-*, abc, xyz" ON j + ROW x = 1 | LOOKUP_🐔 "foo, test-*, abc, xyz" ON j """); - assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP ON j"); + assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP_🐔 ON j"); assertStringAsLookupIndexPattern( "", - "ROW x = 1 | LOOKUP \"\" ON j" + "ROW x = 1 | LOOKUP_🐔 \"\" ON j" ); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"\"\"foo\"\"\" ON j"); - assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP `backtick` ON j"); - assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP ``multiple`back``ticks``` ON j"); - assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP .dot ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\" ON j"); + assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP_🐔 `backtick` ON j"); + assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP_🐔 ``multiple`back``ticks``` ON j"); + assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP_🐔 .dot ON j"); clusterAndIndexAsLookupIndexPattern("cluster:index"); clusterAndIndexAsLookupIndexPattern("cluster:.index"); clusterAndIndexAsLookupIndexPattern("cluster*:index*"); @@ -519,8 +519,8 @@ public void testStringAsLookupIndexPattern() { } private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP " + clusterAndIndex + " ON j"); - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP \"" + clusterAndIndex + "\"" + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 " + clusterAndIndex + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 \"" + clusterAndIndex + "\"" + " ON j"); } public void testInvalidCharacterInIndexPattern() { @@ -528,7 +528,7 @@ public void testInvalidCharacterInIndexPattern() { 
commands.put("FROM {}", "line 1:7: "); if (Build.current().isSnapshot()) { commands.put("METRICS {}", "line 1:10: "); - commands.put("ROW x = 1 | LOOKUP {} ON j", "line 1:21: "); + commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:23: "); } String lineNumber; for (String command : commands.keySet()) { @@ -568,7 +568,7 @@ public void testInvalidCharacterInIndexPattern() { // comma separated indices, with exclusions // Invalid index names after removing exclusion fail, when there is no index name with wildcard before it for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } @@ -582,7 +582,7 @@ public void testInvalidCharacterInIndexPattern() { // Invalid index names, except invalid DateMath, are ignored if there is an index name with wildcard before it String dateMathError = "unit [D] not supported for date math [/D]"; for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; @@ -646,17 +646,17 @@ public void testInvalidQuotingAsMetricsIndexPattern() { public void testInvalidQuotingAsLookupIndexPattern() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); - expectError("ROW x = 1 | LOOKUP \"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP foo\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", "line 1:31: mismatched input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", ": mismatched input '\"bar\"' expecting 'on'"); } public void testIdentifierAsFieldName() { @@ -2050,7 +2050,7 @@ private void assertStringAsIndexPattern(String string, String statement) { private void assertStringAsLookupIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("line 1:14: LOOKUP is in preview and only available in SNAPSHOT build")); + assertThat(e.getMessage(), containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build")); return; } var plan = 
statement(statement); @@ -2115,10 +2115,10 @@ public void testInlineConvertWithNonexistentType() { } public void testLookup() { - String query = "ROW a = 1 | LOOKUP t ON j"; + String query = "ROW a = 1 | LOOKUP_🐔 t ON j"; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = statement(query); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java index 6b17e4efd4de7..7c75ea623b34f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.List; @@ -27,7 +28,7 @@ protected Join createTestInstance() { } private static JoinConfig randomJoinConfig() { - JoinType type = randomFrom(JoinType.values()); + JoinType type = randomFrom(JoinTypes.LEFT, JoinTypes.RIGHT, JoinTypes.INNER, JoinTypes.FULL, JoinTypes.CROSS); List matchFields = randomFieldAttributes(1, 10, false); List leftFields = randomFieldAttributes(1, 10, false); List rightFields = randomFieldAttributes(1, 10, false); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java index dde70d85ba259..13887fbd1740c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.util.ArrayList; import java.util.List; @@ -48,7 +48,7 @@ public void testExpressionsAndReferences() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); // matchfields are a subset of the left and right fields, so they don't contribute to the size of the references set. 
@@ -88,7 +88,7 @@ public void testTransformExprs() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); assertTrue(join.config().matchFields().stream().allMatch(ref -> ref.dataType().equals(DataType.INTEGER))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index f60e5384e1a6f..ff9e45a9f9233 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -146,6 +146,7 @@ private LocalExecutionPlanner planner() throws IOException { null, null, null, + null, esPhysicalOperationProviders() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 82f0ebf316508..c1d94933537f0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -43,7 +43,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; @@ -436,8 +438,9 @@ public void accept(Page page) { } else if (argClass == Integer.class) { return randomInt(); } else if (argClass == JoinType.class) { - return JoinType.LEFT; + return JoinTypes.LEFT; } + if (Expression.class == argClass) { /* * Rather than use any old subclass of expression lets @@ -488,6 +491,15 @@ public void accept(Page page) { if (argClass == Configuration.class) { return randomConfiguration(); } + if (argClass == JoinConfig.class) { + return new JoinConfig( + JoinTypes.LEFT, + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()) + ); + } + try { return mock(argClass); } catch (MockitoException e) { From 0d7b90e22a0000eaea291f14aa4a62d4c18ffe05 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 19 Nov 2024 17:53:52 -0800 Subject: [PATCH 072/386] Deprecate _source.mode in mappings (#116689) This change deprecates _source.mode in mappings, replacing it with the index.mapping.source.mode index setting. 
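For readers migrating existing mappings, a minimal before/after sketch, adapted from the test changes in this patch (the index name and mapping are illustrative only; createIndex and adminClient are the REST test helpers this change also updates):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.mapper.SourceFieldMapper;

    // Before (now deprecated): declare the mode inside the mapping, e.g.
    //   "mappings": { "_source": { "mode": "synthetic" } }
    // After: pass the mode as a final index setting at creation time.
    Settings settings = Settings.builder()
        .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC)
        .build();
    createIndex(adminClient(), "my-synthetic-index", settings, """
        "properties": { "kwd": { "type": "keyword" } }""", null);

Mappings that still contain _source.mode keep parsing; on indices created after the new DEPRECATE_SOURCE_MODE_MAPPER index version they emit a critical deprecation warning pointing at the index setting instead.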
--- .../compat/RestCompatTestTransformTask.java | 2 +- docs/changelog/116689.yaml | 10 ++ .../LogsIndexModeFullClusterRestartIT.java | 16 +--- .../LogsIndexModeRollingUpgradeIT.java | 20 +--- rest-api-spec/build.gradle | 6 ++ .../rest-api-spec/test/logsdb/10_settings.yml | 5 - .../test/logsdb/20_source_mapping.yml | 15 ++- .../rest-api-spec/test/tsdb/20_mapping.yml | 11 --- .../index/IndexSettingProvider.java | 3 +- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/SourceFieldMapper.java | 92 ++++++++++++------- .../elasticsearch/node/NodeConstruction.java | 2 +- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 4 +- .../query/SearchExecutionContextTests.java | 2 +- .../test/rest/ESRestTestCase.java | 35 ++++++- .../test/rest/yaml/section/DoSection.java | 3 + .../xpack/ccr/FollowIndexIT.java | 8 +- .../esql/qa/rest/FieldExtractorTestCase.java | 7 +- .../xpack/logsdb/LogsDBPlugin.java | 9 +- .../SyntheticSourceIndexSettingsProvider.java | 8 +- ...heticSourceIndexSettingsProviderTests.java | 6 +- .../test/40_source_mode_setting.yml | 29 +----- 23 files changed, 162 insertions(+), 134 deletions(-) create mode 100644 docs/changelog/116689.yaml diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index ef93dafa913cd..ba242a8e23861 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -137,7 +137,7 @@ public void skipTest(String fullTestName, String reason) { // However, the folder can be arbitrarily nest so, a == a1/a2/a3, and the test name can include forward slashes, so c == c1/c2/c3 // So we also need to support a1/a2/a3/b/c1/c2/c3 - String[] testParts = fullTestName.split("/"); + String[] testParts = fullTestName.split("/", 3); if (testParts.length < 3) { throw new IllegalArgumentException( "To skip tests, all 3 parts [folder/file/test name] must be defined. found [" + fullTestName + "]" diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml new file mode 100644 index 0000000000000..0b1d1646868aa --- /dev/null +++ b/docs/changelog/116689.yaml @@ -0,0 +1,10 @@ +pr: 116689 +summary: Deprecate `_source.mode` in mappings +area: Mapping +type: deprecation +issues: [] +deprecation: + title: Deprecate `_source.mode` in mappings + area: Mapping + details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. 
+ impact: Use `index.mapping.source.mode` index setting instead diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index 3459a29e98649..9866d94dccc3c 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { @ClassRule @@ -169,22 +165,16 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 1, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.equalTo("logsdb")); } } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java index 8c369ebc9950d..1eb7cbd3f70c2 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @ClassRule() @@ 
-160,14 +156,10 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 3, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.nullValue()); + assertIndexSettings(2, Matchers.nullValue()); + assertIndexSettings(3, Matchers.equalTo("logsdb")); } } @@ -183,13 +175,11 @@ static void enableLogsdbByDefault() throws IOException { assertOK(client().performRequest(request)); } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 439960228cef6..650d17e41de7f 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -60,4 +60,10 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") + task.skipTest("tsdb/20_mapping/stored source is supported", "no longer serialize source_mode") + task.skipTest("tsdb/20_mapping/Synthetic source", "no longer serialize source_mode") + task.skipTest("logsdb/10_settings/create logs index", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index d0f89b1b8b6cb..463df7d2ab1bb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -76,11 +76,6 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logsdb" } - - do: - indices.get_mapping: - index: test - - match: { test.mappings._source.mode: synthetic } - --- using default timestamp field mapping: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml 
index 27146557bb1be..06a007b8aaca5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -13,10 +13,10 @@ synthetic _source is default: index: mode: logsdb - do: - indices.get: + indices.get_settings: index: test-default-source - - - match: { test-default-source.mappings._source.mode: "synthetic" } + - match: { test-default-source.settings.index.mode: logsdb } + - match: { test-default-source.settings.index.mapping.source.mode: null } --- stored _source mode is supported: @@ -28,11 +28,12 @@ stored _source mode is supported: index: mode: logsdb mapping.source.mode: stored + - do: - indices.get: + indices.get_settings: index: test-stored-source - - - match: { test-stored-source.mappings._source.mode: "stored" } + - match: { test-stored-source.settings.index.mode: logsdb } + - match: { test-stored-source.settings.index.mapping.source.mode: stored } --- disabled _source is not supported: @@ -110,7 +111,6 @@ include/exclude is supported with stored _source: indices.get: index: test-includes - - match: { test-includes.mappings._source.mode: "stored" } - match: { test-includes.mappings._source.includes: ["a"] } - do: @@ -129,5 +129,4 @@ include/exclude is supported with stored _source: indices.get: index: test-excludes - - match: { test-excludes.mappings._source.mode: "stored" } - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 4d8f03a6e5e18..9fe3f5e0b7272 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -450,11 +450,6 @@ nested fields: type: long time_series_metric: gauge - - do: - indices.get_mapping: {} - - - match: {tsdb-synthetic.mappings._source.mode: synthetic} - --- stored source is supported: - requires: @@ -486,12 +481,6 @@ stored source is supported: type: keyword time_series_dimension: true - - do: - indices.get: - index: tsdb_index - - - match: { tsdb_index.mappings._source.mode: "stored" } - --- disabled source is not supported: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 6a553d5dc5440..8c997a9766baa 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; @@ -54,7 +55,7 @@ Settings getAdditionalIndexSettings( /** * Infrastructure class that holds services that can be used by {@link IndexSettingProvider} instances. 
*/ - record Parameters(CheckedFunction mapperServiceFactory) { + record Parameters(ClusterService clusterService, CheckedFunction mapperServiceFactory) { } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5746bea12a2d8..7a5f469a57fa1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -134,6 +134,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd25cd6eb80a3..e5b12f748543f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -68,6 +70,9 @@ public class SourceFieldMapper extends MetadataFieldMapper { return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); + public static final String DEPRECATION_WARNING = "Configuring source mode in mappings is deprecated and will be removed " + + "in future versions. 
Use [index.mapping.source.mode] index setting instead."; + /** The source mode */ public enum Mode { DISABLED, @@ -79,28 +84,32 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper STORED = new SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper DISABLED = new SourceFieldMapper( Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); public static class Defaults { @@ -134,16 +143,7 @@ public static class Builder extends MetadataFieldMapper.Builder { * The default mode for TimeSeries is left empty on purpose, so that mapping printings include the synthetic * source mode. */ - private final Parameter mode = new Parameter<>( - "mode", - true, - () -> null, - (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), - m -> toType(m).enabled.explicit() ? null : toType(m).mode, - (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), - v -> v.toString().toLowerCase(Locale.ROOT) - ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) - .setSerializerCheck((includeDefaults, isConfigured, value) -> value != null); // don't emit if `enabled` is configured + private final Parameter mode; private final Parameter> includes = Parameter.stringArrayParam( "includes", false, @@ -158,15 +158,28 @@ public static class Builder extends MetadataFieldMapper.Builder { private final Settings settings; private final IndexMode indexMode; + private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.serializeMode = serializeMode; + this.mode = new Parameter<>( + "mode", + true, + () -> null, + (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), + m -> toType(m).enabled.explicit() ? 
null : toType(m).mode, + (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), + v -> v.toString().toLowerCase(Locale.ROOT) + ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) + // don't emit if `enabled` is configured + .setSerializerCheck((includeDefaults, isConfigured, value) -> serializeMode && value != null); } public Builder setSynthetic() { @@ -219,21 +232,22 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - - SourceFieldMapper sourceFieldMapper; - if (isDefault()) { + if (mode.isConfigured()) { + serializeMode = true; + } + final SourceFieldMapper sourceFieldMapper; + if (isDefault() && sourceMode == null) { // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. - if (sourceMode == null) { - sourceFieldMapper = DEFAULT; - } else { - sourceFieldMapper = resolveStaticInstance(sourceMode); - } + sourceFieldMapper = DEFAULT; + } else if (isDefault() && serializeMode == false && sourceMode != null) { + sourceFieldMapper = resolveStaticInstance(sourceMode); } else { sourceFieldMapper = new SourceFieldMapper( sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY) + excludes.getValue().toArray(Strings.EMPTY_ARRAY), + serializeMode ); } if (indexMode != null) { @@ -283,15 +297,29 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - - return resolveStaticInstance(settingSourceMode); + if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { + return resolveStaticInstance(settingSourceMode); + } else { + return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) ) - ); + ) { + @Override + public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) + throws MapperParsingException { + assert name.equals(SourceFieldMapper.NAME) : name; + if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { + deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); + } + return super.parse(name, node, parserContext); + } + }; static final class SourceFieldType extends MappedFieldType { private final boolean enabled; @@ -330,8 +358,9 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { } } - // nullable for bwc reasons + // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; + private final boolean serializeMode; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -341,7 +370,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; 
private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -349,6 +378,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.includes = includes; this.excludes = excludes; this.complete = stored() && sourceFilter == null; + this.serializeMode = serializeMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -419,7 +449,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 62f923d673dc7..c2471a9a6bb2f 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -822,7 +822,7 @@ private void construct( .searchOperationListeners(searchOperationListeners) .build(); - final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); + final var parameters = new IndexSettingProvider.Parameters(clusterService, indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( Sets.union( builtinIndexSettingProviders(), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 399740e6200e6..d4d0e67ff4141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index df6d9380fd141..d7f33b9cdb3ba 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -247,14 +247,14 @@ public void testSyntheticSourceInTimeSeries() throws IOException { }); DocumentMapper mapper = createTimeSeriesModeDocumentMapper(mapping); 
assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSyntheticSourceWithLogsIndexMode() throws IOException { XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); DocumentMapper mapper = createLogsModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSupportsNonDefaultParameterValues() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index fdc18264e2299..dc70c44a89128 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index c20aded9280fc..dd08107bd67fb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -69,6 +69,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -112,6 +113,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -1827,8 +1829,9 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); + } else if (isSyntheticSourceConfiguredInMapping(mapping)) { + request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); } - final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1872,6 +1875,27 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } + @SuppressWarnings("unchecked") + protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { + if (mapping == 
null) { + return false; + } + var mappings = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + mapping.trim().startsWith("{") ? mapping : '{' + mapping + '}', + false + ); + if (mappings.containsKey("_doc")) { + mappings = (Map) mappings.get("_doc"); + } + Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); + if (sourceMapper == null) { + return false; + } + Object mode = sourceMapper.get("mode"); + return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); + } + protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); @@ -2269,7 +2293,7 @@ protected static Map> getClusterStateFeatures(RestClient adm */ protected static IndexVersion minimumIndexVersion() throws IOException { final Request request = new Request("GET", "_nodes"); - request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version"); + request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version,nodes.*.index_version"); final Response response = adminClient().performRequest(request); final Map nodes = ObjectPath.createFromResponse(response).evaluate("nodes"); @@ -2277,10 +2301,13 @@ protected static IndexVersion minimumIndexVersion() throws IOException { IndexVersion minVersion = null; for (Map.Entry node : nodes.entrySet()) { Map nodeData = (Map) node.getValue(); - String versionStr = (String) nodeData.get("max_index_version"); + Object versionStr = nodeData.get("index_version"); + if (versionStr == null) { + versionStr = nodeData.get("max_index_version"); + } // fallback on version if index version is not there IndexVersion indexVersion = versionStr != null - ? IndexVersion.fromId(Integer.parseInt(versionStr)) + ? 
IndexVersion.fromId(Integer.parseInt(versionStr.toString())) : IndexVersion.fromId( parseLegacyVersion((String) nodeData.get("version")).map(Version::id).orElse(IndexVersions.MINIMUM_COMPATIBLE.id()) ); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 8243dcdc9de94..627554f6b261d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; @@ -495,6 +496,8 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } + unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); + if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 53e068ae6126e..0bb4afe51b85a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; @@ -366,8 +367,10 @@ public void testSyntheticSource() throws Exception { final String leaderIndexName = "synthetic_leader"; if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); - createIndex(adminClient(), leaderIndexName, Settings.EMPTY, """ - "_source": {"mode": "synthetic"}, + Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + createIndex(adminClient(), leaderIndexName, settings, """ "properties": {"kwd": {"type": "keyword"}}}""", null); for (int i = 0; i < numDocs; i++) { logger.info("Indexing doc [{}]", i); @@ -392,7 +395,6 @@ public void testSyntheticSource() throws Exception { } assertBusy(() -> { verifyDocuments(client(), followIndexName, numDocs); - assertMap(getIndexMappingAsMap(followIndexName), matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))); if (overrideNumberOfReplicas) { assertMap(getIndexSettingsAsMap(followIndexName), matchesMap().extraOk().entry("index.number_of_replicas", "0")); } else { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index d124fdb5755c3..6f45c9d92fd12 100644 --- 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.BlockLoader; @@ -1456,16 +1457,12 @@ private static void index(String name, String... docs) throws IOException { } private static void createIndex(String name, CheckedConsumer mapping) throws IOException { - Request request = new Request("PUT", "/" + name); XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); - index.startObject("mappings"); mapping.accept(index); index.endObject(); - index.endObject(); String configStr = Strings.toString(index); logger.info("index: {} {}", name, configStr); - request.setJsonEntity(configStr); - client().performRequest(request); + ESRestTestCase.createIndex(name, Settings.EMPTY, configStr); } /** diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 93ba126e4196f..04d12fd51bae7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -67,10 +67,13 @@ public Collection getAdditionalIndexSettingProviders(Index if (DiscoveryNode.isStateless(settings)) { return List.of(logsdbIndexModeSettingsProvider); } - return List.of( - new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), - logsdbIndexModeSettingsProvider + var syntheticSettingProvider = new SyntheticSourceIndexSettingsProvider( + licenseService, + parameters.mapperServiceFactory(), + logsdbIndexModeSettingsProvider, + () -> parameters.clusterService().state().nodes().getMinSupportedIndexVersion() ); + return List.of(syntheticSettingProvider, logsdbIndexModeSettingsProvider); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index e87f10ec19916..1f38ecda19515 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.time.Instant; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -39,15 +40,18 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction mapperServiceFactory; private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; + private final Supplier createdIndexVersion; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, CheckedFunction mapperServiceFactory, - 
LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider, + Supplier createdIndexVersion ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; + this.createdIndexVersion = createdIndexVersion; } @Override @@ -148,7 +152,7 @@ private IndexMetadata buildIndexMetadataForMapperService( ); int shardReplicas = indexTemplateAndCreateRequestSettings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0); var finalResolvedSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_VERSION_CREATED, createdIndexVersion.get()) .put(indexTemplateAndCreateRequestSettings) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, dummyShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 2d8723a0d8c25..1f5d26eaedf34 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -54,7 +55,7 @@ public void setup() { provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, getLogsdbIndexModeSettingsProvider(false)); + }, getLogsdbIndexModeSettingsProvider(false), IndexVersion::current); newMapperServiceCounter.set(0); } @@ -336,7 +337,8 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(true) + getLogsdbIndexModeSettingsProvider(true), + IndexVersion::current ); final Settings settings = Settings.EMPTY; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 33fedce3b59c1..792df4dbf639e 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -470,13 +470,7 @@ create an index with time_series index mode and synthetic source: indices.get_settings: index: "test_time_series_index_mode_synthetic" - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } - - - - do: - indices.get_mapping: - index: test_time_series_index_mode_synthetic - - - match: { 
test_time_series_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_time_series_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with logsdb index mode and synthetic source: @@ -493,12 +487,7 @@ create an index with logsdb index mode and synthetic source: indices.get_settings: index: "test_logsdb_index_mode_synthetic" - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_synthetic - - - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_logsdb_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with time_series index mode and stored source: @@ -524,12 +513,7 @@ create an index with time_series index mode and stored source: indices.get_settings: index: "test_time_series_index_mode_undefined" - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series } - - - do: - indices.get_mapping: - index: test_time_series_index_mode_undefined - - - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored } + - match: { test_time_series_index_mode_undefined.settings.index.mapping.source.mode: stored } --- create an index with logsdb index mode and stored source: @@ -546,12 +530,7 @@ create an index with logsdb index mode and stored source: indices.get_settings: index: "test_logsdb_index_mode_undefined" - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_undefined - - - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored } + - match: { test_logsdb_index_mode_undefined.settings.index.mapping.source.mode: stored } --- create an index with time_series index mode and disabled source: From 3b0d7e0eee83bf201ea7d0a0d147efdb858eefde Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 17:10:38 +1100 Subject: [PATCH 073/386] Mute org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT test {yaml=/10_apm/Test template reinstallation} #116445 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4f8f834bb2c50..5a9be1ca68301 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -281,6 +281,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-across-clusters/line_197} issue: https://github.com/elastic/elasticsearch/issues/117099 +- class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT + method: test {yaml=/10_apm/Test template reinstallation} + issue: https://github.com/elastic/elasticsearch/issues/116445 # Examples: # From 261ad852156629d427a1588ffbaab6861f11be89 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 20 Nov 2024 09:44:28 +0100 Subject: [PATCH 074/386] Move duplicate connection lookup logic to AbstractSearchAsyncAction (#117055) We found this duplication today when working on batching query phase requests. For batching it would be nice to have the connection already available at a higher level in the AbstractSearchAsyncAction and this is a worthwhile cleanup in general, given how many issues we had around connection lookup recently. 
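A simplified before/after sketch of the signature change (not verbatim code: Result stands in for the subclass's concrete SearchPhaseResult type parameter, and the surrounding class bodies are elided):

    // Before: every subclass of AbstractSearchAsyncAction resolved the node
    // connection itself, duplicating the same try/catch in each implementation.
    protected void executePhaseOnShard(SearchShardIterator shardIt, SearchShardTarget shard,
                                       SearchActionListener<Result> listener) {
        final Transport.Connection connection;
        try {
            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
        } catch (Exception e) {
            listener.onFailure(e); // duplicated failure handling
            return;
        }
        // ... send the phase request over the connection ...
    }

    // After: the base class looks the connection up once in doPerformPhaseOnShard,
    // fails the shard through the listener if the lookup throws, and hands the
    // resolved connection to the subclass.
    protected abstract void executePhaseOnShard(SearchShardIterator shardIt,
                                                Transport.Connection connection,
                                                SearchActionListener<Result> listener);

A side effect of centralizing the lookup is that connection resolution failures are now turned into shard failures in one place instead of in each phase implementation.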
---
 .../search/AbstractSearchAsyncAction.java       | 16 ++++++---
 .../SearchDfsQueryThenFetchAsyncAction.java     |  9 +----
 .../action/search/SearchPhase.java              |  2 +-
 .../SearchQueryThenFetchAsyncAction.java        |  9 +----
 .../TransportOpenPointInTimeAction.java         | 10 +-----
 .../AbstractSearchAsyncActionTests.java         |  2 +-
 .../action/search/MockSearchPhaseContext.java   |  2 +-
 .../action/search/SearchAsyncActionTests.java   | 34 ++++++++-----------
 8 files changed, 33 insertions(+), 51 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index c051f0ca7a6f5..09fb70fb06ba4 100644
--- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -299,7 +299,7 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat
     }
 
     private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) {
-        executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) {
+        var shardListener = new SearchActionListener<Result>(shard, shardIndex) {
             @Override
             public void innerOnResponse(Result result) {
                 try {
@@ -315,7 +315,15 @@ public void onFailure(Exception e) {
                 releasable.close();
                 onShardFailure(shardIndex, shard, shardIt, e);
             }
-        });
+        };
+        final Transport.Connection connection;
+        try {
+            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
+        } catch (Exception e) {
+            shardListener.onFailure(e);
+            return;
+        }
+        executePhaseOnShard(shardIt, connection, shardListener);
     }
 
     private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
@@ -327,12 +335,12 @@ private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) {
 
     /**
      * Sends the request to the actual shard.
      * @param shardIt the shards iterator
-     * @param shard the shard routing to send the request for
+     * @param connection to node that the shard is located on
      * @param listener the listener to notify on response
      */
     protected abstract void executePhaseOnShard(
         SearchShardIterator shardIt,
-        SearchShardTarget shard,
+        Transport.Connection connection,
         SearchActionListener<Result> listener
     );
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 69ca1569a7c07..25d59a06664da 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -84,16 +84,9 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction
     @Override
     protected void executePhaseOnShard(
         final SearchShardIterator shardIt,
-        final SearchShardTarget shard,
+        final Transport.Connection connection,
         final SearchActionListener<DfsSearchResult> listener
     ) {
-        final Transport.Connection connection;
-        try {
-            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
-        } catch (Exception e) {
-            listener.onFailure(e);
-            return;
-        }
         getSearchTransport().sendExecuteDfs(connection, buildShardSearchRequest(shardIt, listener.requestIndex), getTask(), listener);
     }

diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
index d91ea85e2fa97..986f7210c0d1b 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
@@ -79,7 +79,7 @@ protected static void doCheckNoMissingShards(
     /**
     * Releases shard targets that are not used in the docsIdsToLoad.
     */
-    protected void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction<?> context) {
+    protected static void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction<?> context) {
         // we only release search context that we did not fetch from, if we are not scrolling
         // or using a PIT and if it has at least one hit that didn't make it to the global topDocs
         if (searchPhaseResult == null) {
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index 84e0e2adea612..f75b84abc2f0f 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -91,16 +91,9 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<SearchPh
     @Override
     protected void executePhaseOnShard(
         final SearchShardIterator shardIt,
-        final SearchShardTarget shard,
+        final Transport.Connection connection,
         final SearchActionListener<SearchPhaseResult> listener
     ) {
-        final Transport.Connection connection;
-        try {
-            connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
-        } catch (Exception e) {
-            listener.onFailure(e);
-            return;
-        }
         ShardSearchRequest request = rewriteShardSearchRequest(super.buildShardSearchRequest(shardIt, listener.requestIndex));
         getSearchTransport().sendExecuteQuery(connection, request, getTask(), listener);
     }
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
index 7ba4a7ce59869..9e60eedbad6a2 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchService;
-import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
 import org.elasticsearch.search.internal.ShardSearchContextId;
@@ -252,16 +251,9 @@ protected String missingShardsErrorMessage(StringBuilder missingShards) {
     @Override
     protected void executePhaseOnShard(
         SearchShardIterator shardIt,
-        SearchShardTarget shard,
+        Transport.Connection connection,
         SearchActionListener<SearchPhaseResult> phaseListener
     ) {
-        final Transport.Connection connection;
-        try {
-            connection = connectionLookup.apply(shardIt.getClusterAlias(), shard.getNodeId());
-        } catch (Exception e) {
-            phaseListener.onFailure(e);
-            return;
-        }
         transportService.sendChildRequest(
             connection,
             OPEN_SHARD_READER_CONTEXT_NAME,
diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
index f8ecdbd062054..725a4583d104a 100644
--- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java
@@ -101,7 +101,7 @@ protected SearchPhase getNextPhase() {

     @Override
     protected void executePhaseOnShard(
         final SearchShardIterator shardIt,
-        final SearchShardTarget shard,
+        final Transport.Connection shard,
         final SearchActionListener<SearchPhaseResult> listener
     ) {}
diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 03c5d0a06f6fb..484b3c6b386fd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -147,7 +147,7 @@ public void executeNextPhase(SearchPhase currentPhase, Supplier nex @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection shard, SearchActionListener listener ) { onShardResult(new SearchPhaseResult() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index f655136cd4ba4..b4ddd48172d01 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; @@ -119,16 +118,15 @@ public void testSkipSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per replica return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -227,23 +225,22 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { safeAwait(awaitInitialRequests); - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() ); try { - if (shardFailures[shard.getShardId().id()]) { + if (shardFailures[shardIt.shardId().id()]) { listener.onFailure(new RuntimeException()); } else { listener.onResponse(testSearchPhaseResult); @@ -340,11 +337,11 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, 
shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", testResponse.queried.add(shardId)); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -464,13 +461,13 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", response.queried.add(shardId)); final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { + if (shardId.id() == 0) { testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); } else { testSearchPhaseResult = new TestSearchPhaseResult( @@ -573,15 +570,14 @@ public void testAllowPartialResults() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -673,7 +669,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { assert false : "Expected to skip all shards"; From 87b3de8d92a70c5475238343c83f8fb6cc37dd6c Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 20 Nov 2024 09:41:25 +0000 Subject: [PATCH 075/386] Remove NodeFeaturesFixupListener (#117039) --- .../TransportNodesFeaturesAction.java | 5 +- .../features/NodeFeaturesFixupListener.java | 218 ---------------- .../elasticsearch/node/NodeConstruction.java | 2 - .../NodeFeaturesFixupListenerTests.java | 246 ------------------ 4 files changed, 2 insertions(+), 469 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java delete mode 100644 server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java index 83d1356e5ef62..d20eee96809e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java @@ -16,7 +16,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,8 +27,7 @@ import java.io.IOException; import java.util.List; -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) -// @UpdateForV10 // this can be removed in v10. It may be called by v8 nodes to v9 nodes. +@UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // this can be removed in v10. It may be called by v8 nodes to v9 nodes. public class TransportNodesFeaturesAction extends TransportNodesAction< NodesFeaturesRequest, NodesFeaturesResponse, diff --git a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java b/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java deleted file mode 100644 index 4d9074be15695..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.features; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse; -import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterFeatures; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.MasterServiceTaskQueue; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.stream.Collectors; - -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9 -public class NodeFeaturesFixupListener implements ClusterStateListener { - - private static final Logger logger = 
LogManager.getLogger(NodeFeaturesFixupListener.class); - - private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30); - - private final MasterServiceTaskQueue taskQueue; - private final ClusterAdminClient client; - private final Scheduler scheduler; - private final Executor executor; - private final Set pendingNodes = Collections.synchronizedSet(new HashSet<>()); - - public NodeFeaturesFixupListener(ClusterService service, ClusterAdminClient client, ThreadPool threadPool) { - // there tends to be a lot of state operations on an upgrade - this one is not time-critical, - // so use LOW priority. It just needs to be run at some point after upgrade. - this( - service.createTaskQueue("fix-node-features", Priority.LOW, new NodesFeaturesUpdater()), - client, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - NodeFeaturesFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodesFeaturesTask implements ClusterStateTaskListener { - private final Map> results; - private final int retryNum; - - NodesFeaturesTask(Map> results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply features for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map> results() { - return results; - } - } - - static class NodesFeaturesUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - var existingFeatures = builder.nodeFeatures(); - - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // double check there are still no features for the node - if (existingFeatures.getOrDefault(e.getKey(), Set.of()).isEmpty()) { - builder.putNodeFeatures(e.getKey(), e.getValue()); - modified = true; - } - } - c.success(() -> {}); - } - return modified ? builder.build() : context.initialState(); - } - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.nodesDelta().masterNodeChanged() && event.localNodeMaster()) { - /* - * Execute this if we have just become master. - * Check if there are any nodes that should have features in cluster state, but don't. - * This can happen if the master was upgraded from before 8.13, and one or more non-master nodes - * were already upgraded. They don't re-join the cluster with the new master, so never get their features - * (which the master now understands) added to cluster state. - * So we need to do a separate transport call to get the node features and add them to cluster state. - * We can't use features to determine when this should happen, as the features are incorrect. - * We also can't use transport version, as that is unreliable for upgrades - * from versions before 8.8 (see TransportVersionFixupListener). - * So the only thing we can use is release version. - * This is ok here, as Serverless will never hit this case, so the node feature fetch action will never be called on Serverless. - * This whole class will be removed in ES v9. 
- */ - ClusterFeatures nodeFeatures = event.state().clusterFeatures(); - Set queryNodes = event.state() - .nodes() - .stream() - .filter(n -> n.getVersion().onOrAfter(Version.V_8_15_0)) - .map(DiscoveryNode::getId) - .filter(n -> getNodeFeatures(nodeFeatures, n).isEmpty()) - .collect(Collectors.toSet()); - - if (queryNodes.isEmpty() == false) { - logger.debug("Fetching actual node features for nodes {}", queryNodes); - queryNodesFeatures(queryNodes, 0); - } - } - } - - @SuppressForbidden(reason = "Need to access a specific node's features") - private static Set getNodeFeatures(ClusterFeatures features, String nodeId) { - return features.nodeFeatures().getOrDefault(nodeId, Set.of()); - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> queryNodesFeatures(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void queryNodesFeatures(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesFeaturesRequest request = new NodesFeaturesRequest(outstandingNodes.toArray(String[]::new)); - client.execute(TransportNodesFeaturesAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesFeaturesResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read features for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesFeaturesResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read features from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map> results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeFeatures::nodeFeatures)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("fix-node-features", new NodesFeaturesTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index c2471a9a6bb2f..caf65c05cf27d 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -42,7 +42,6 @@ import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.MasterHistoryService; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSettings; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -787,7 +786,6 @@ private void construct( if 
(DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMappingUpdateService(systemIndices, client)); - clusterService.addListener(new NodeFeaturesFixupListener(clusterService, client.admin().cluster(), threadPool)); } SourceFieldMetrics sourceFieldMetrics = new SourceFieldMetrics( diff --git a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java deleted file mode 100644 index 00cfac7248da6..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.features; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse; -import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesTask; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesUpdater; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.node.VersionInformation; -import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; -import org.elasticsearch.cluster.service.MasterServiceTaskQueue; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; -import org.mockito.ArgumentCaptor; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; - -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.arrayContainingInAnyOrder; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.ArgumentMatchers.same; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.hamcrest.MockitoHamcrest.argThat; - -public class NodeFeaturesFixupListenerTests extends ESTestCase { - - @SuppressWarnings("unchecked") - private static MasterServiceTaskQueue 
newMockTaskQueue() { - return mock(MasterServiceTaskQueue.class); - } - - private static DiscoveryNodes nodes(Version... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i])); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - private static DiscoveryNodes nodes(VersionInformation... versions) { - var builder = DiscoveryNodes.builder(); - for (int i = 0; i < versions.length; i++) { - builder.add( - DiscoveryNodeUtils.builder("node" + i) - .address(new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i)) - .version(versions[i]) - .build() - ); - } - builder.localNodeId("node0").masterNodeId("node0"); - return builder.build(); - } - - @SafeVarargs - private static Map> features(Set... nodeFeatures) { - Map> features = new HashMap<>(); - for (int i = 0; i < nodeFeatures.length; i++) { - features.put("node" + i, nodeFeatures[i]); - } - return features; - } - - private static NodesFeaturesResponse getResponse(Map> responseData) { - return new NodesFeaturesResponse( - ClusterName.DEFAULT, - responseData.entrySet() - .stream() - .map( - e -> new NodeFeatures( - e.getValue(), - DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200)) - ) - ) - .toList(), - List.of() - ); - } - - public void testNothingDoneWhenNothingToFix() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(Set.of("f1", "f2"), Set.of("f1", "f2"))) - .build(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - - verify(taskQueue, never()).submitTask(anyString(), any(), any()); - } - - public void testFeaturesFixedAfterNewMaster() throws Exception { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Set features = Set.of("f1", "f2"); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - ArgumentCaptor> action = ArgumentCaptor.captor(); - ArgumentCaptor task = ArgumentCaptor.captor(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - - action.getValue().onResponse(getResponse(Map.of("node1", features, "node2", features))); - verify(taskQueue).submitTask(anyString(), task.capture(), any()); - - ClusterState newState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( - testState, - new NodesFeaturesUpdater(), - List.of(task.getValue()) - ); - - assertThat(newState.clusterFeatures().allNodeFeatures(), containsInAnyOrder("f1", "f2")); - } - - public void testFeaturesFetchedOnlyForUpdatedNodes() { - MasterServiceTaskQueue taskQueue = 
newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes( - nodes( - VersionInformation.CURRENT, - VersionInformation.CURRENT, - new VersionInformation(Version.V_8_12_0, IndexVersion.current(), IndexVersion.current()) - ) - ) - .nodeFeatures(features(Set.of("f1", "f2"), Set.of(), Set.of())) - .build(); - - ArgumentCaptor> action = ArgumentCaptor.captor(); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1"))), - action.capture() - ); - } - - public void testConcurrentChangesDoNotOverlap() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Set features = Set.of("f1", "f2"); - - ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - NodeFeaturesFixupListener listeners = new NodeFeaturesFixupListener(taskQueue, client, null, null); - listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - any() - ); - // don't send back the response yet - - ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, features, Set.of())) - .build(); - // should not send any requests - listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1)); - verifyNoMoreInteractions(client); - } - - public void testFailedRequestsAreRetried() { - MasterServiceTaskQueue taskQueue = newMockTaskQueue(); - ClusterAdminClient client = mock(ClusterAdminClient.class); - Scheduler scheduler = mock(Scheduler.class); - Executor executor = mock(Executor.class); - Set features = Set.of("f1", "f2"); - - ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE) - .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT)) - .nodeFeatures(features(features, Set.of(), Set.of())) - .build(); - - ArgumentCaptor> action = ArgumentCaptor.captor(); - ArgumentCaptor retry = ArgumentCaptor.forClass(Runnable.class); - - NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, scheduler, executor); - listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE)); - verify(client).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - - action.getValue().onFailure(new RuntimeException("failure")); - verify(scheduler).schedule(retry.capture(), any(), same(executor)); - - // running the retry should cause another call - retry.getValue().run(); - verify(client, times(2)).execute( - eq(TransportNodesFeaturesAction.TYPE), - argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))), - action.capture() - ); - } -} From 
49ac50db08e4b084b4f059937df150bfba60978e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:55:55 +1100 Subject: [PATCH 076/386] Mute org.elasticsearch.upgrades.DownsampleIT testRollupIndex {upgradedNodes=3} #117122 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5a9be1ca68301..30fa5ee2cd732 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -284,6 +284,9 @@ tests: - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT method: test {yaml=/10_apm/Test template reinstallation} issue: https://github.com/elastic/elasticsearch/issues/116445 +- class: org.elasticsearch.upgrades.DownsampleIT + method: testRollupIndex {upgradedNodes=3} + issue: https://github.com/elastic/elasticsearch/issues/117122 # Examples: # From f6210d6f894f9f16c0e1993204bcbda1e210e839 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:56:09 +1100 Subject: [PATCH 077/386] Mute org.elasticsearch.upgrades.DownsampleIT testRollupIndex {upgradedNodes=1} #117123 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 30fa5ee2cd732..2ee45197e1c4d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -287,6 +287,9 @@ tests: - class: org.elasticsearch.upgrades.DownsampleIT method: testRollupIndex {upgradedNodes=3} issue: https://github.com/elastic/elasticsearch/issues/117122 +- class: org.elasticsearch.upgrades.DownsampleIT + method: testRollupIndex {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/117123 # Examples: # From 0e100d7172c10594775c0fd6c3e99751d8d5df8d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:56:21 +1100 Subject: [PATCH 078/386] Mute org.elasticsearch.upgrades.DownsampleIT testRollupIndex {upgradedNodes=2} #117124 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2ee45197e1c4d..a13e0df6ea7d5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -290,6 +290,9 @@ tests: - class: org.elasticsearch.upgrades.DownsampleIT method: testRollupIndex {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/117123 +- class: org.elasticsearch.upgrades.DownsampleIT + method: testRollupIndex {upgradedNodes=2} + issue: https://github.com/elastic/elasticsearch/issues/117124 # Examples: # From 7a5ddb7cb41d1696cf2091537d38bcfda8704361 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:56:35 +1100 Subject: [PATCH 079/386] Mute org.elasticsearch.upgrades.IndexingIT testAutoIdWithOpTypeCreate {upgradedNodes=1} #117125 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a13e0df6ea7d5..1126d2ed358a6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -293,6 +293,9 @@ tests: - class: org.elasticsearch.upgrades.DownsampleIT method: testRollupIndex {upgradedNodes=2} issue: https://github.com/elastic/elasticsearch/issues/117124 +- class: org.elasticsearch.upgrades.IndexingIT + method: testAutoIdWithOpTypeCreate {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/117125 # Examples: # From 4e05c46b3181d9c2be59ed5b363e64d5573cc676 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:56:44 +1100 Subject: [PATCH 080/386] Mute org.elasticsearch.upgrades.IndexingIT testTsdb {upgradedNodes=1} #117126 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1126d2ed358a6..afde710866782 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -296,6 +296,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testAutoIdWithOpTypeCreate {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/117125 +- class: org.elasticsearch.upgrades.IndexingIT + method: testTsdb {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/117126 # Examples: # From 7aa6972c98704dcbed19c963d12147fca1e848fc Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:56:54 +1100 Subject: [PATCH 081/386] Mute org.elasticsearch.upgrades.IndexingIT testIndexing {upgradedNodes=1} #117127 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index afde710866782..a1b4657ad8a5b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -299,6 +299,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testTsdb {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/117126 +- class: org.elasticsearch.upgrades.IndexingIT + method: testIndexing {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/117127 # Examples: # From caad8dfc1ee2d19e7250b5635c883726066a30d7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 20:57:05 +1100 Subject: [PATCH 082/386] Mute org.elasticsearch.upgrades.IndexingIT testSyntheticSource {upgradedNodes=1} #117128 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index a1b4657ad8a5b..2f3142d7eb174 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -302,6 +302,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testIndexing {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/117127 +- class: org.elasticsearch.upgrades.IndexingIT + method: testSyntheticSource {upgradedNodes=1} + issue: https://github.com/elastic/elasticsearch/issues/117128 # Examples: # From 0fabe093352e89cda1eed6a72741f47295d46c1f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 21:48:06 +1100 Subject: [PATCH 083/386] Mute org.elasticsearch.upgrades.IndexingIT testIndexing {upgradedNodes=3} #117135 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2f3142d7eb174..ed0dbdab6ac7a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -305,6 +305,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testSyntheticSource {upgradedNodes=1} issue: https://github.com/elastic/elasticsearch/issues/117128 +- class: org.elasticsearch.upgrades.IndexingIT + method: testIndexing {upgradedNodes=3} + issue: https://github.com/elastic/elasticsearch/issues/117135 # Examples: # From 96934b91fceae9ce45a7dbb88669ee7378c04086 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 21:48:15 +1100 Subject: [PATCH 084/386] Mute org.elasticsearch.upgrades.IndexingIT testTsdb {upgradedNodes=3} 
#117136 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ed0dbdab6ac7a..b33086017c804 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -308,6 +308,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testIndexing {upgradedNodes=3} issue: https://github.com/elastic/elasticsearch/issues/117135 +- class: org.elasticsearch.upgrades.IndexingIT + method: testTsdb {upgradedNodes=3} + issue: https://github.com/elastic/elasticsearch/issues/117136 # Examples: # From 4879d6a3d2e52b1a3725bce2b0988bdf5decb3da Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 21:48:24 +1100 Subject: [PATCH 085/386] Mute org.elasticsearch.upgrades.IndexingIT testIndexing {upgradedNodes=2} #117137 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b33086017c804..b907d44385099 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -311,6 +311,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testTsdb {upgradedNodes=3} issue: https://github.com/elastic/elasticsearch/issues/117136 +- class: org.elasticsearch.upgrades.IndexingIT + method: testIndexing {upgradedNodes=2} + issue: https://github.com/elastic/elasticsearch/issues/117137 # Examples: # From abe76473bf905dc94a87890cdfaeac2a5b1d6d41 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 21:48:33 +1100 Subject: [PATCH 086/386] Mute org.elasticsearch.upgrades.IndexingIT testTsdb {upgradedNodes=2} #117138 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b907d44385099..13a5807214764 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -314,6 +314,9 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testIndexing {upgradedNodes=2} issue: https://github.com/elastic/elasticsearch/issues/117137 +- class: org.elasticsearch.upgrades.IndexingIT + method: testTsdb {upgradedNodes=2} + issue: https://github.com/elastic/elasticsearch/issues/117138 # Examples: # From f6a8ffe1a014c76796d8d30450a77b2419f8a750 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 20 Nov 2024 22:02:27 +1100 Subject: [PATCH 087/386] Mute org.elasticsearch.upgrades.IndexingIT org.elasticsearch.upgrades.IndexingIT #117140 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 13a5807214764..1383e94111b15 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -317,6 +317,8 @@ tests: - class: org.elasticsearch.upgrades.IndexingIT method: testTsdb {upgradedNodes=2} issue: https://github.com/elastic/elasticsearch/issues/117138 +- class: org.elasticsearch.upgrades.IndexingIT + issue: https://github.com/elastic/elasticsearch/issues/117140 # Examples: # From b89d578bc05e4a908fdc6e82c2a1cd4f0352454f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 20 Nov 2024 12:03:54 +0100 Subject: [PATCH 088/386] Delete redundant ListenerActionIT (#117079) The listener pool is long gone, this test is irrelevant now. 
--- .../action/ListenerActionIT.java | 52 ------------------- 1 file changed, 52 deletions(-) delete mode 100644 server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java deleted file mode 100644 index 8b5e014b519c8..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.action; - -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.Requests; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; - -public class ListenerActionIT extends ESIntegTestCase { - public void testThreadedListeners() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - final AtomicReference failure = new AtomicReference<>(); - final AtomicReference threadName = new AtomicReference<>(); - Client client = client(); - - IndexRequest request = new IndexRequest("test").id("1"); - if (randomBoolean()) { - // set the source, without it, we will have a verification failure - request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); - } - - client.index(request, new ActionListener() { - @Override - public void onResponse(DocWriteResponse indexResponse) { - threadName.set(Thread.currentThread().getName()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - threadName.set(Thread.currentThread().getName()); - failure.set(e); - latch.countDown(); - } - }); - - latch.await(); - - assertFalse(threadName.get().contains("listener")); - } -} From 9854fdc21572d47cb8feeda87894391ebaa2bb7d Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 20 Nov 2024 12:10:45 +0000 Subject: [PATCH 089/386] Add back gte version conditions on full restart tests (#117129) Follow on from #116929. It is valid to do a full restart from any v8 cluster into a v9 cluster, so we need to maintain test conditions for those versions. Rather than historical versions, we can now use version features instead. 
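A minimal sketch of the guard pattern this commit reinstates (the class and test
names below are hypothetical; oldClusterHasFeature, assumeTrue and
isRunningAgainstOldCluster mirror the calls visible in the diff that follows):

    // Hypothetical full-restart test gated on a synthetic version feature.
    // The "gte_v8.2.0" feature is present exactly when the old cluster is on
    // 8.2.0 or later, so the guard keeps working even after historical
    // Version constants are no longer available for comparison.
    public class ExampleFeatureGatedRestartIT extends ParameterizedFullClusterRestartTestCase {
        public void testFeatureGatedBehaviour() throws Exception {
            assumeTrue("behaviour under test changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0"));
            if (isRunningAgainstOldCluster()) {
                // create the index or data stream on the old cluster
            } else {
                // after the restart: assert the old-cluster data is still readable
            }
        }
    }
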
--- muted-tests.yml | 30 ------------------- .../FullClusterRestartDownsampleIT.java | 1 + .../upgrades/FullClusterRestartIT.java | 2 ++ 3 files changed, 3 insertions(+), 30 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 1383e94111b15..463715c5afd68 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -245,36 +245,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} issue: https://github.com/elastic/elasticsearch/issues/117082 -- class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT - method: testRollupIndex {cluster=OLD} - issue: https://github.com/elastic/elasticsearch/issues/117084 -- class: org.elasticsearch.upgrades.FullClusterRestartDownsampleIT - method: testRollupIndex {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/117086 -- class: org.elasticsearch.upgrades.FullClusterRestartIT - method: testNewReplicasTimeSeriesMode {cluster=OLD} - issue: https://github.com/elastic/elasticsearch/issues/117087 -- class: org.elasticsearch.upgrades.FullClusterRestartIT - method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/117088 -- class: org.elasticsearch.upgrades.FullClusterRestartIT - method: testSearchTimeSeriesMode {cluster=OLD} - issue: https://github.com/elastic/elasticsearch/issues/117089 -- class: org.elasticsearch.upgrades.FullClusterRestartIT - method: testSearchTimeSeriesMode {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/117090 -- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT - method: testNewReplicasTimeSeriesMode {cluster=OLD} - issue: https://github.com/elastic/elasticsearch/issues/117091 -- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT - method: testSearchTimeSeriesMode {cluster=OLD} - issue: https://github.com/elastic/elasticsearch/issues/117092 -- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT - method: testNewReplicasTimeSeriesMode {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/117093 -- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT - method: testSearchTimeSeriesMode {cluster=UPGRADED} - issue: https://github.com/elastic/elasticsearch/issues/117094 - class: org.elasticsearch.discovery.ClusterDisruptionIT method: testAckedIndexing issue: https://github.com/elastic/elasticsearch/issues/117024 diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java index 6682d48c1796c..d98d53baf9015 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java @@ -268,6 +268,7 @@ private String getRollupIndexName() throws IOException { } public void testRollupIndex() throws Exception { + assumeTrue("Downsample got many stability improvements in 8.10.0", oldClusterHasFeature("gte_v8.10.0")); if (isRunningAgainstOldCluster()) { createIlmPolicy(); createIndex(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 83bf16a0cc24a..0f41712abe927 100644 
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -261,6 +261,7 @@ public void testNewReplicas() throws Exception { } public void testSearchTimeSeriesMode() throws Exception { + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0")); int numDocs; if (isRunningAgainstOldCluster()) { numDocs = createTimeSeriesModeIndex(1); @@ -298,6 +299,7 @@ public void testSearchTimeSeriesMode() throws Exception { } public void testNewReplicasTimeSeriesMode() throws Exception { + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0")); if (isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { From ac06a84e0a49a653f0800fdf19f63234b9393865 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Nov 2024 13:38:26 +0100 Subject: [PATCH 090/386] Revert "Deprecate _source.mode in mappings (#116689)" (#117150) This reverts commit 0d7b90e22a0000eaea291f14aa4a62d4c18ffe05, because of bwc testing failures. --- .../compat/RestCompatTestTransformTask.java | 2 +- docs/changelog/116689.yaml | 10 -- .../LogsIndexModeFullClusterRestartIT.java | 16 +++- .../LogsIndexModeRollingUpgradeIT.java | 20 +++- rest-api-spec/build.gradle | 6 -- .../rest-api-spec/test/logsdb/10_settings.yml | 5 + .../test/logsdb/20_source_mapping.yml | 15 +-- .../rest-api-spec/test/tsdb/20_mapping.yml | 11 +++ .../index/IndexSettingProvider.java | 3 +- .../elasticsearch/index/IndexVersions.java | 1 - .../index/mapper/SourceFieldMapper.java | 92 +++++++------------ .../elasticsearch/node/NodeConstruction.java | 2 +- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 4 +- .../query/SearchExecutionContextTests.java | 2 +- .../test/rest/ESRestTestCase.java | 35 +------ .../test/rest/yaml/section/DoSection.java | 3 - .../xpack/ccr/FollowIndexIT.java | 8 +- .../esql/qa/rest/FieldExtractorTestCase.java | 7 +- .../xpack/logsdb/LogsDBPlugin.java | 9 +- .../SyntheticSourceIndexSettingsProvider.java | 8 +- ...heticSourceIndexSettingsProviderTests.java | 6 +- .../test/40_source_mode_setting.yml | 29 +++++- 23 files changed, 134 insertions(+), 162 deletions(-) delete mode 100644 docs/changelog/116689.yaml diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index ba242a8e23861..ef93dafa913cd 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -137,7 +137,7 @@ public void skipTest(String fullTestName, String reason) { // However, the folder can be arbitrarily nest so, a == a1/a2/a3, and the test name can include forward slashes, so c == c1/c2/c3 // So we also need to support a1/a2/a3/b/c1/c2/c3 - String[] testParts = fullTestName.split("/", 3); + String[] testParts = fullTestName.split("/"); if (testParts.length < 3) { throw new IllegalArgumentException( "To skip tests, all 3 parts [folder/file/test name] must be defined. 
found [" + fullTestName + "]" diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml deleted file mode 100644 index 0b1d1646868aa..0000000000000 --- a/docs/changelog/116689.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 116689 -summary: Deprecate `_source.mode` in mappings -area: Mapping -type: deprecation -issues: [] -deprecation: - title: Deprecate `_source.mode` in mappings - area: Mapping - details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. - impact: Use `index.mapping.source.mode` index setting instead diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index 9866d94dccc3c..3459a29e98649 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -29,6 +30,9 @@ import java.util.Map; import java.util.function.Supplier; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; + public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { @ClassRule @@ -165,16 +169,22 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexSettings(0, Matchers.nullValue()); - assertIndexSettings(1, Matchers.equalTo("logsdb")); + assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); + assertIndexMappingsAndSettings( + 1, + Matchers.equalTo("logsdb"), + matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) + ); } } - private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { + private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) + throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); + assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java index 1eb7cbd3f70c2..8c369ebc9950d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; 
import org.elasticsearch.common.time.FormatNames; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -29,6 +30,9 @@ import java.util.Map; import java.util.function.Supplier; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; + public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @ClassRule() @@ -156,10 +160,14 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexSettings(0, Matchers.nullValue()); - assertIndexSettings(1, Matchers.nullValue()); - assertIndexSettings(2, Matchers.nullValue()); - assertIndexSettings(3, Matchers.equalTo("logsdb")); + assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); + assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk()); + assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk()); + assertIndexMappingsAndSettings( + 3, + Matchers.equalTo("logsdb"), + matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) + ); } } @@ -175,11 +183,13 @@ static void enableLogsdbByDefault() throws IOException { assertOK(client().performRequest(request)); } - private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { + private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) + throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); + assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 650d17e41de7f..439960228cef6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -60,10 +60,4 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") - task.skipTest("tsdb/20_mapping/stored source is supported", "no longer serialize source_mode") - task.skipTest("tsdb/20_mapping/Synthetic source", "no longer serialize source_mode") - task.skipTest("logsdb/10_settings/create logs index", "no longer serialize source_mode") - task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") - task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") - task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 463df7d2ab1bb..d0f89b1b8b6cb 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -76,6 +76,11 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logsdb" } + - do: + indices.get_mapping: + index: test + - match: { test.mappings._source.mode: synthetic } + --- using default timestamp field mapping: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 06a007b8aaca5..27146557bb1be 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -13,10 +13,10 @@ synthetic _source is default: index: mode: logsdb - do: - indices.get_settings: + indices.get: index: test-default-source - - match: { test-default-source.settings.index.mode: logsdb } - - match: { test-default-source.settings.index.mapping.source.mode: null } + + - match: { test-default-source.mappings._source.mode: "synthetic" } --- stored _source mode is supported: @@ -28,12 +28,11 @@ stored _source mode is supported: index: mode: logsdb mapping.source.mode: stored - - do: - indices.get_settings: + indices.get: index: test-stored-source - - match: { test-stored-source.settings.index.mode: logsdb } - - match: { test-stored-source.settings.index.mapping.source.mode: stored } + + - match: { test-stored-source.mappings._source.mode: "stored" } --- disabled _source is not supported: @@ -111,6 +110,7 @@ include/exclude is supported with stored _source: indices.get: index: test-includes + - match: { test-includes.mappings._source.mode: "stored" } - match: { test-includes.mappings._source.includes: ["a"] } - do: @@ -129,4 +129,5 @@ include/exclude is supported with stored _source: indices.get: index: test-excludes + - match: { test-excludes.mappings._source.mode: "stored" } - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 9fe3f5e0b7272..4d8f03a6e5e18 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -450,6 +450,11 @@ nested fields: type: long time_series_metric: gauge + - do: + indices.get_mapping: {} + + - match: {tsdb-synthetic.mappings._source.mode: synthetic} + --- stored source is supported: - requires: @@ -481,6 +486,12 @@ stored source is supported: type: keyword time_series_dimension: true + - do: + indices.get: + index: tsdb_index + + - match: { tsdb_index.mappings._source.mode: "stored" } + --- disabled source is not supported: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 8c997a9766baa..6a553d5dc5440 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; @@ -55,7 +54,7 @@ Settings getAdditionalIndexSettings( /** * Infrastructure class that holds services that can be used by {@link IndexSettingProvider} instances. */ - record Parameters(ClusterService clusterService, CheckedFunction mapperServiceFactory) { + record Parameters(CheckedFunction mapperServiceFactory) { } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7a5f469a57fa1..5746bea12a2d8 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -134,7 +134,6 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); - public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index e5b12f748543f..dd25cd6eb80a3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -39,7 +38,6 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -70,9 +68,6 @@ public class SourceFieldMapper extends MetadataFieldMapper { return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); - public static final String DEPRECATION_WARNING = "Configuring source mode in mappings is deprecated and will be removed " - + "in future versions. 
Use [index.mapping.source.mode] index setting instead."; - /** The source mode */ public enum Mode { DISABLED, @@ -84,32 +79,28 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - false + Strings.EMPTY_ARRAY ); private static final SourceFieldMapper STORED = new SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - false + Strings.EMPTY_ARRAY ); private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - false + Strings.EMPTY_ARRAY ); private static final SourceFieldMapper DISABLED = new SourceFieldMapper( Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - false + Strings.EMPTY_ARRAY ); public static class Defaults { @@ -143,7 +134,16 @@ public static class Builder extends MetadataFieldMapper.Builder { * The default mode for TimeSeries is left empty on purpose, so that mapping printings include the synthetic * source mode. */ - private final Parameter mode; + private final Parameter mode = new Parameter<>( + "mode", + true, + () -> null, + (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), + m -> toType(m).enabled.explicit() ? null : toType(m).mode, + (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), + v -> v.toString().toLowerCase(Locale.ROOT) + ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) + .setSerializerCheck((includeDefaults, isConfigured, value) -> value != null); // don't emit if `enabled` is configured private final Parameter> includes = Parameter.stringArrayParam( "includes", false, @@ -158,28 +158,15 @@ public static class Builder extends MetadataFieldMapper.Builder { private final Settings settings; private final IndexMode indexMode; - private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); - this.serializeMode = serializeMode; - this.mode = new Parameter<>( - "mode", - true, - () -> null, - (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), - m -> toType(m).enabled.explicit() ? 
null : toType(m).mode, - (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), - v -> v.toString().toLowerCase(Locale.ROOT) - ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) - // don't emit if `enabled` is configured - .setSerializerCheck((includeDefaults, isConfigured, value) -> serializeMode && value != null); } public Builder setSynthetic() { @@ -232,22 +219,21 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - if (mode.isConfigured()) { - serializeMode = true; - } - final SourceFieldMapper sourceFieldMapper; - if (isDefault() && sourceMode == null) { + + SourceFieldMapper sourceFieldMapper; + if (isDefault()) { // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. - sourceFieldMapper = DEFAULT; - } else if (isDefault() && serializeMode == false && sourceMode != null) { - sourceFieldMapper = resolveStaticInstance(sourceMode); + if (sourceMode == null) { + sourceFieldMapper = DEFAULT; + } else { + sourceFieldMapper = resolveStaticInstance(sourceMode); + } } else { sourceFieldMapper = new SourceFieldMapper( sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY), - serializeMode + excludes.getValue().toArray(Strings.EMPTY_ARRAY) ); } if (indexMode != null) { @@ -297,29 +283,15 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { - return resolveStaticInstance(settingSourceMode); - } else { - return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); - } + + return resolveStaticInstance(settingSourceMode); }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), - c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) ) - ) { - @Override - public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) - throws MapperParsingException { - assert name.equals(SourceFieldMapper.NAME) : name; - if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { - deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); - } - return super.parse(name, node, parserContext); - } - }; + ); static final class SourceFieldType extends MappedFieldType { private final boolean enabled; @@ -358,9 +330,8 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { } } - // nullable for bwc reasons - TODO: fold this into serializeMode + // nullable for bwc reasons private final @Nullable Mode mode; - private final boolean serializeMode; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -370,7 +341,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; 
private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -378,7 +349,6 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.includes = includes; this.excludes = excludes; this.complete = stored() && sourceFilter == null; - this.serializeMode = serializeMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -449,7 +419,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); + return new Builder(null, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index caf65c05cf27d..842fa773ce330 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -820,7 +820,7 @@ private void construct( .searchOperationListeners(searchOperationListeners) .build(); - final var parameters = new IndexSettingProvider.Parameters(clusterService, indicesService::createIndexMapperServiceForValidation); + final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( Sets.union( builtinIndexSettingProviders(), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index d4d0e67ff4141..399740e6200e6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index d7f33b9cdb3ba..df6d9380fd141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -247,14 +247,14 @@ public void testSyntheticSourceInTimeSeries() throws IOException { }); DocumentMapper mapper = createTimeSeriesModeDocumentMapper(mapping); 
assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } public void testSyntheticSourceWithLogsIndexMode() throws IOException { XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); DocumentMapper mapper = createLogsModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); } public void testSupportsNonDefaultParameterValues() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index dc70c44a89128..fdc18264e2299 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index dd08107bd67fb..c20aded9280fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -69,7 +69,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -113,7 +112,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -1829,9 +1827,8 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (isSyntheticSourceConfiguredInMapping(mapping)) { - request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); } + final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1875,27 +1872,6 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } - @SuppressWarnings("unchecked") - protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { - if (mapping == 
null) { - return false; - } - var mappings = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - mapping.trim().startsWith("{") ? mapping : '{' + mapping + '}', - false - ); - if (mappings.containsKey("_doc")) { - mappings = (Map) mappings.get("_doc"); - } - Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); - if (sourceMapper == null) { - return false; - } - Object mode = sourceMapper.get("mode"); - return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); - } - protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); @@ -2293,7 +2269,7 @@ protected static Map> getClusterStateFeatures(RestClient adm */ protected static IndexVersion minimumIndexVersion() throws IOException { final Request request = new Request("GET", "_nodes"); - request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version,nodes.*.index_version"); + request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version"); final Response response = adminClient().performRequest(request); final Map nodes = ObjectPath.createFromResponse(response).evaluate("nodes"); @@ -2301,13 +2277,10 @@ protected static IndexVersion minimumIndexVersion() throws IOException { IndexVersion minVersion = null; for (Map.Entry node : nodes.entrySet()) { Map nodeData = (Map) node.getValue(); - Object versionStr = nodeData.get("index_version"); - if (versionStr == null) { - versionStr = nodeData.get("max_index_version"); - } + String versionStr = (String) nodeData.get("max_index_version"); // fallback on version if index version is not there IndexVersion indexVersion = versionStr != null - ? IndexVersion.fromId(Integer.parseInt(versionStr.toString())) + ? 
IndexVersion.fromId(Integer.parseInt(versionStr)) : IndexVersion.fromId( parseLegacyVersion((String) nodeData.get("version")).map(Version::id).orElse(IndexVersions.MINIMUM_COMPATIBLE.id()) ); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 627554f6b261d..8243dcdc9de94 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; @@ -496,8 +495,6 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } - unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); - if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 0bb4afe51b85a..53e068ae6126e 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; @@ -367,10 +366,8 @@ public void testSyntheticSource() throws Exception { final String leaderIndexName = "synthetic_leader"; if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); - Settings settings = Settings.builder() - .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) - .build(); - createIndex(adminClient(), leaderIndexName, settings, """ + createIndex(adminClient(), leaderIndexName, Settings.EMPTY, """ + "_source": {"mode": "synthetic"}, "properties": {"kwd": {"type": "keyword"}}}""", null); for (int i = 0; i < numDocs; i++) { logger.info("Indexing doc [{}]", i); @@ -395,6 +392,7 @@ public void testSyntheticSource() throws Exception { } assertBusy(() -> { verifyDocuments(client(), followIndexName, numDocs); + assertMap(getIndexMappingAsMap(followIndexName), matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))); if (overrideNumberOfReplicas) { assertMap(getIndexSettingsAsMap(followIndexName), matchesMap().extraOk().entry("index.number_of_replicas", "0")); } else { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 6f45c9d92fd12..d124fdb5755c3 100644 --- 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.BlockLoader; @@ -1457,12 +1456,16 @@ private static void index(String name, String... docs) throws IOException { } private static void createIndex(String name, CheckedConsumer mapping) throws IOException { + Request request = new Request("PUT", "/" + name); XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); + index.startObject("mappings"); mapping.accept(index); index.endObject(); + index.endObject(); String configStr = Strings.toString(index); logger.info("index: {} {}", name, configStr); - ESRestTestCase.createIndex(name, Settings.EMPTY, configStr); + request.setJsonEntity(configStr); + client().performRequest(request); } /** diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 04d12fd51bae7..93ba126e4196f 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -67,13 +67,10 @@ public Collection getAdditionalIndexSettingProviders(Index if (DiscoveryNode.isStateless(settings)) { return List.of(logsdbIndexModeSettingsProvider); } - var syntheticSettingProvider = new SyntheticSourceIndexSettingsProvider( - licenseService, - parameters.mapperServiceFactory(), - logsdbIndexModeSettingsProvider, - () -> parameters.clusterService().state().nodes().getMinSupportedIndexVersion() + return List.of( + new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), + logsdbIndexModeSettingsProvider ); - return List.of(syntheticSettingProvider, logsdbIndexModeSettingsProvider); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index 1f38ecda19515..e87f10ec19916 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.time.Instant; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -40,18 +39,15 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction mapperServiceFactory; private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; - private final Supplier createdIndexVersion; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, CheckedFunction mapperServiceFactory, - 
LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider, - Supplier createdIndexVersion + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; - this.createdIndexVersion = createdIndexVersion; } @Override @@ -152,7 +148,7 @@ private IndexMetadata buildIndexMetadataForMapperService( ); int shardReplicas = indexTemplateAndCreateRequestSettings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0); var finalResolvedSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, createdIndexVersion.get()) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(indexTemplateAndCreateRequestSettings) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, dummyShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 1f5d26eaedf34..2d8723a0d8c25 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -55,7 +54,7 @@ public void setup() { provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, getLogsdbIndexModeSettingsProvider(false), IndexVersion::current); + }, getLogsdbIndexModeSettingsProvider(false)); newMapperServiceCounter.set(0); } @@ -337,8 +336,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(true), - IndexVersion::current + getLogsdbIndexModeSettingsProvider(true) ); final Settings settings = Settings.EMPTY; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 792df4dbf639e..33fedce3b59c1 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -470,7 +470,13 @@ create an index with time_series index mode and synthetic source: indices.get_settings: index: "test_time_series_index_mode_synthetic" - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } - - match: { test_time_series_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } + + + - do: + 
indices.get_mapping:
+        index: test_time_series_index_mode_synthetic
+
+  - match: { test_time_series_index_mode_synthetic.mappings._source.mode: synthetic }
 
 ---
 create an index with logsdb index mode and synthetic source:
@@ -487,7 +493,12 @@ create an index with logsdb index mode and synthetic source:
       indices.get_settings:
         index: "test_logsdb_index_mode_synthetic"
   - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb }
-  - match: { test_logsdb_index_mode_synthetic.settings.index.mapping.source.mode: synthetic }
+
+  - do:
+      indices.get_mapping:
+        index: test_logsdb_index_mode_synthetic
+
+  - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic }
 
 ---
 create an index with time_series index mode and stored source:
@@ -513,7 +524,12 @@ create an index with time_series index mode and stored source:
       indices.get_settings:
         index: "test_time_series_index_mode_undefined"
   - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series }
-  - match: { test_time_series_index_mode_undefined.settings.index.mapping.source.mode: stored }
+
+  - do:
+      indices.get_mapping:
+        index: test_time_series_index_mode_undefined
+
+  - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored }
 
 ---
 create an index with logsdb index mode and stored source:
@@ -530,7 +546,12 @@ create an index with logsdb index mode and stored source:
       indices.get_settings:
         index: "test_logsdb_index_mode_undefined"
   - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb }
-  - match: { test_logsdb_index_mode_undefined.settings.index.mapping.source.mode: stored }
+
+  - do:
+      indices.get_mapping:
+        index: test_logsdb_index_mode_undefined
+
+  - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored }
 
 ---
 create an index with time_series index mode and disabled source:

From dcd7fb7d694251772c7b635d2227bc174f2248d3 Mon Sep 17 00:00:00 2001
From: "Joey F. Poon"
Date: Wed, 20 Nov 2024 22:50:53 +0900
Subject: [PATCH 091/386] Add `.security-workflow-insights` perms for kibana_system (#116485)

Adds auto_configure, read, write, and create_index permissions for the new .security-workflows-insights-* index. This index is created and used internally by Kibana.
---
 .../KibanaOwnedReservedRoleDescriptors.java  | 16 ++++++++++++++
 .../authz/store/ReservedRolesStoreTests.java | 22 +++++++++++++++++++
 2 files changed, 38 insertions(+)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
index 259e66f633bac..cc589b53eaa1a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java
@@ -484,6 +484,22 @@ static RoleDescriptor kibanaSystem(String name) {
             // Endpoint heartbeat. Kibana reads from these to determine metering/billing for
             // endpoints.
             RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read", "create_index").build(),
+            // Security Solution workflows insights. Kibana creates, manages, and uses these
+            // to provide users with insights on potential configuration improvements
+            RoleDescriptor.IndicesPrivileges.builder()
+                .indices(".edr-workflow-insights-*")
+                .privileges(
+                    "create_index",
+                    "auto_configure",
+                    "manage",
+                    "read",
+                    "write",
+                    "delete",
+                    TransportUpdateSettingsAction.TYPE.name(),
+                    TransportPutMappingAction.TYPE.name(),
+                    RolloverAction.NAME
+                )
+                .build(),
             // For connectors telemetry. Will be removed once we switched to connectors API
             RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() },
             null,
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index 9818a890d465f..17579fd6368ce 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -1106,6 +1106,28 @@ public void testKibanaSystemRole() {
             assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true));
         });
 
+        // index for Security Solution workflow insights
+        Arrays.asList(".edr-workflow-insights-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> {
+            final IndexAbstraction indexAbstraction = mockIndexAbstraction(index);
+            assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false));
+            assertThat(
+                kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction),
+                is(true)
+            );
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true));
+            assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true));
+        });
+
         // Data telemetry reads mappings, metadata and stats of indices
         Arrays.asList(randomAlphaOfLengthBetween(8, 24), "packetbeat-*").forEach((index) -> {
             logger.info("index name [{}]", index);

From 0e641793cbd228d6fffac03b2d6b3367c7c99a88 Mon Sep 17 00:00:00 2001
From: Pat Whelan
Date: Wed, 20 Nov 2024 09:29:47 -0500
Subject: [PATCH 092/386] [ML] Randomly generate uuids (#116662)

randomAlpha is generating UTF16 strings, which return as UTF8 and fail to match. Generating random UTF8 strings can generate control characters, which the REST handler throws an error on. So we're generating UUIDs.

Fix #113430
---
 muted-tests.yml                                            | 3 ---
 .../org/elasticsearch/xpack/inference/InferenceCrudIT.java | 4 ++--
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index 463715c5afd68..cc568144698ee 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -112,9 +112,6 @@ tests:
 - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT
   method: testStalledShardMigrationProperlyDetected
   issue: https://github.com/elastic/elasticsearch/issues/115697
-- class: org.elasticsearch.xpack.inference.InferenceCrudIT
-  method: testSupportedStream
-  issue: https://github.com/elastic/elasticsearch/issues/113430
 - class: org.elasticsearch.xpack.test.rest.XPackRestIT
   method: test {p0=transform/transforms_start_stop/Verify start transform reuses destination index}
   issue: https://github.com/elastic/elasticsearch/issues/115808
diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
index 591db6db8495a..78e064b42bbb2 100644
--- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
+++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java
@@ -432,7 +432,7 @@ public void testUnsupportedStream() throws Exception {
         assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type"));
 
         try {
-            var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10)));
+            var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomUUID()));
             assertThat(events.size(), equalTo(2));
             events.forEach(event -> {
                 switch (event.name()) {
@@ -457,7 +457,7 @@ public void testSupportedStream() throws Exception {
         assertEquals(modelId, singleModel.get("inference_id"));
         assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type"));
 
-        var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphaOfLength(10)).toList();
+        var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomUUID()).toList();
 
         try {
             var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input);

From c3f73d0319b312d2e44df89da0fcb32d774e5954 Mon Sep 17 00:00:00 2001
From: Mark Tozzi
Date: Wed, 20 Nov 2024 09:31:01 -0500
Subject: [PATCH 093/386] Esql Enable Date Nanos (#117080)

This enables date nanos support as tech preview. Basic operations, like reading values, binary comparisons, and functions that don't care about type should work, but some functions are not yet supported. Most notably, Bucket is not yet supported, although Date_Trunc is and can be used for grouping. See the docs for the full list of limitations.
relates to #109352
---
 docs/changelog/117080.yaml | 5 ++
 docs/reference/esql/esql-limitations.asciidoc | 4 +-
 .../functions/kibana/definition/case.json | 42 +++++++++
 .../functions/kibana/definition/coalesce.json | 18 ++++
 .../functions/kibana/definition/count.json | 2 +-
 .../kibana/definition/count_distinct.json | 66 ++++++++++++++
 .../kibana/definition/date_trunc.json | 36 ++++++++
 .../functions/kibana/definition/equals.json | 18 ++++
 .../kibana/definition/greater_than.json | 18 ++++
 .../definition/greater_than_or_equal.json | 18 ++++
 .../functions/kibana/definition/greatest.json | 18 ++++
 .../functions/kibana/definition/least.json | 18 ++++
 .../kibana/definition/less_than.json | 18 ++++
 .../kibana/definition/less_than_or_equal.json | 18 ++++
 .../functions/kibana/definition/match.json | 2 +-
 .../esql/functions/kibana/definition/max.json | 12 +++
 .../esql/functions/kibana/definition/min.json | 12 +++
 .../functions/kibana/definition/mv_count.json | 12 +++
 .../kibana/definition/mv_dedupe.json | 12 +++
 .../functions/kibana/definition/mv_first.json | 12 +++
 .../functions/kibana/definition/mv_last.json | 12 +++
 .../functions/kibana/definition/mv_max.json | 12 +++
 .../functions/kibana/definition/mv_min.json | 12 +++
 .../functions/kibana/definition/mv_slice.json | 24 +++++
 .../functions/kibana/definition/mv_sort.json | 18 ++++
 .../kibana/definition/not_equals.json | 18 ++++
 .../functions/kibana/definition/qstr.json | 2 +-
 .../kibana/definition/to_date_nanos.json | 87 ++++++++++++++++++-
 .../kibana/definition/to_datetime.json | 12 +++
 .../functions/kibana/definition/to_long.json | 12 +++
 .../kibana/definition/to_string.json | 12 +++
 .../functions/kibana/definition/values.json | 12 +++
 .../esql/functions/kibana/docs/match.md | 10 +--
 .../esql/functions/kibana/docs/qstr.md | 10 +--
 .../esql/functions/types/case.asciidoc | 2 +
 .../esql/functions/types/coalesce.asciidoc | 1 +
 .../functions/types/count_distinct.asciidoc | 4 +
 .../esql/functions/types/date_trunc.asciidoc | 2 +
 .../esql/functions/types/equals.asciidoc | 1 +
 .../functions/types/greater_than.asciidoc | 1 +
 .../types/greater_than_or_equal.asciidoc | 1 +
 .../esql/functions/types/greatest.asciidoc | 1 +
 .../esql/functions/types/least.asciidoc | 1 +
 .../esql/functions/types/less_than.asciidoc | 1 +
 .../types/less_than_or_equal.asciidoc | 1 +
 .../esql/functions/types/max.asciidoc | 1 +
 .../esql/functions/types/min.asciidoc | 1 +
 .../esql/functions/types/mv_count.asciidoc | 1 +
 .../esql/functions/types/mv_dedupe.asciidoc | 1 +
 .../esql/functions/types/mv_first.asciidoc | 1 +
 .../esql/functions/types/mv_last.asciidoc | 1 +
 .../esql/functions/types/mv_max.asciidoc | 1 +
 .../esql/functions/types/mv_min.asciidoc | 1 +
 .../esql/functions/types/mv_slice.asciidoc | 1 +
 .../esql/functions/types/mv_sort.asciidoc | 1 +
 .../esql/functions/types/not_equals.asciidoc | 1 +
 .../functions/types/to_date_nanos.asciidoc | 8 +-
 .../esql/functions/types/to_datetime.asciidoc | 1 +
 .../esql/functions/types/to_long.asciidoc | 1 +
 .../esql/functions/types/to_string.asciidoc | 1 +
 .../esql/functions/types/values.asciidoc | 1 +
 .../esql/core/plugin/EsqlCorePlugin.java | 1 -
 .../xpack/esql/core/type/DataType.java | 1 -
 .../xpack/esql/action/EsqlCapabilities.java | 12 +--
 .../function/EsqlFunctionRegistry.java | 2 +
 .../expression/function/aggregate/Max.java | 4 +-
 .../expression/function/aggregate/Min.java | 4 +-
 .../expression/function/aggregate/Values.java | 7 +-
 .../function/scalar/date/DateTrunc.java | 4 +-
 .../operator/comparison/LessThan.java | 4 +-
 .../function/MultiRowTestCaseSupplier.java | 36 ++++++++
 .../aggregate/CountDistinctTests.java | 1 +
 .../function/aggregate/MaxTests.java | 9 ++
 .../function/aggregate/MinTests.java | 9 ++
 .../function/aggregate/ValuesTests.java | 1 +
 .../AbstractMultivalueFunctionTestCase.java | 4 -
 .../operator/comparison/LessThanTests.java | 4 +-
 77 files changed, 716 insertions(+), 40 deletions(-)
 create mode 100644 docs/changelog/117080.yaml

diff --git a/docs/changelog/117080.yaml b/docs/changelog/117080.yaml
new file mode 100644
index 0000000000000..5909f966e0fa2
--- /dev/null
+++ b/docs/changelog/117080.yaml
@@ -0,0 +1,5 @@
+pr: 117080
+summary: Esql Enable Date Nanos (tech preview)
+area: ES|QL
+type: enhancement
+issues: []
diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc
index 1772e956bd9e2..c2849e4889f98 100644
--- a/docs/reference/esql/esql-limitations.asciidoc
+++ b/docs/reference/esql/esql-limitations.asciidoc
@@ -25,6 +25,9 @@ include::processing-commands/limit.asciidoc[tag=limitation]
 * `alias`
 * `boolean`
 * `date`
+* `date_nanos` (Tech Preview)
+** The following functions don't yet support date nanos: `bucket`, `date_format`, `date_parse`, `date_diff`, `date_extract`
+** You can use `to_datetime` to cast to millisecond dates to use unsupported functions
 * `double` (`float`, `half_float`, `scaled_float` are represented as `double`)
 * `ip`
 * `keyword` family including `keyword`, `constant_keyword`, and `wildcard`
@@ -50,7 +53,6 @@ include::processing-commands/limit.asciidoc[tag=limitation]
 ** `position`
 ** `aggregate_metric_double`
 * Date/time
-** `date_nanos`
 ** `date_range`
 * Other types
 ** `binary`
diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json
index bf498f690551c..51693d9d30660 100644
--- a/docs/reference/esql/functions/kibana/definition/case.json
+++ b/docs/reference/esql/functions/kibana/definition/case.json
@@ -172,6 +172,48 @@
       "params" : [
         {
           "name" : "condition",
           "type" : "boolean",
           "optional" : false,
           "description" : "A condition."
         },
         {
           "name" : "trueValue",
           "type" : "date",
           "optional" : false,
           "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches."
         }
       ],
       "variadic" : true,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "condition",
+          "type" : "boolean",
+          "optional" : false,
+          "description" : "A condition."
+        },
+        {
+          "name" : "trueValue",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches."
+        }
+      ],
+      "variadic" : true,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "condition",
+          "type" : "boolean",
+          "optional" : false,
+          "description" : "A condition."
+        },
+        {
+          "name" : "trueValue",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches."
+        },
+        {
+          "name" : "elseValue",
+          "type" : "date_nanos",
+          "optional" : true,
+          "description" : "The value that's returned when no condition evaluates to `true`."
+ } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 7f49195190951..c929323397c9b 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -88,6 +88,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "Expression to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "Other expression to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/count.json b/docs/reference/esql/functions/kibana/definition/count.json index 88d4ba3d3e339..329a18c4d9d01 100644 --- a/docs/reference/esql/functions/kibana/definition/count.json +++ b/docs/reference/esql/functions/kibana/definition/count.json @@ -151,7 +151,7 @@ ], "examples" : [ "FROM employees\n| STATS COUNT(height)", - "FROM employees \n| STATS count = COUNT(*) BY languages \n| SORT languages DESC", + "FROM employees\n| STATS count = COUNT(*) BY languages\n| SORT languages DESC", "ROW words=\"foo;bar;baz;qux;quux;foo\"\n| STATS word_count = COUNT(SPLIT(words, \";\"))", "ROW n=1\n| WHERE n < 0\n| STATS COUNT(n)", "ROW n=1\n| STATS COUNT(n > 0 OR NULL), COUNT(n < 0 OR NULL)" diff --git a/docs/reference/esql/functions/kibana/definition/count_distinct.json b/docs/reference/esql/functions/kibana/definition/count_distinct.json index 3addd08df60df..54b99ee84ce2d 100644 --- a/docs/reference/esql/functions/kibana/definition/count_distinct.json +++ b/docs/reference/esql/functions/kibana/definition/count_distinct.json @@ -136,6 +136,72 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. 
The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json index 871994407233b..cdda984a0ce7e 100644 --- a/docs/reference/esql/functions/kibana/definition/date_trunc.json +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -22,6 +22,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -39,6 +57,24 @@ ], "variadic" : false, "returnType" : "date" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/equals.json b/docs/reference/esql/functions/kibana/definition/equals.json index 59df59eaccc4e..885d949f4b20f 100644 --- a/docs/reference/esql/functions/kibana/definition/equals.json +++ b/docs/reference/esql/functions/kibana/definition/equals.json @@ -77,6 +77,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than.json b/docs/reference/esql/functions/kibana/definition/greater_than.json index 7354112551e2c..cf6e30a0a4547 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json index 832eed417ef4a..2535c68af6acf 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index eebb4fad1eb1d..077100317dfca 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -53,6 +53,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 02fa58f92eaef..18ec65c60f475 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -52,6 +52,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than.json b/docs/reference/esql/functions/kibana/definition/less_than.json index 66578d73b8e9c..a73754d200d46 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than.json +++ b/docs/reference/esql/functions/kibana/definition/less_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json index 5ffd4567cdb07..7af477db32a34 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json
index 8a355360a790f..4a5b05a3f501b 100644
--- a/docs/reference/esql/functions/kibana/definition/match.json
+++ b/docs/reference/esql/functions/kibana/definition/match.json
@@ -78,7 +78,7 @@
     }
   ],
   "examples" : [
-    "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;"
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
   ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/max.json b/docs/reference/esql/functions/kibana/definition/max.json
index 45fd26571b091..7f3d2215ee099 100644
--- a/docs/reference/esql/functions/kibana/definition/max.json
+++ b/docs/reference/esql/functions/kibana/definition/max.json
@@ -28,6 +28,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : ""
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/min.json b/docs/reference/esql/functions/kibana/definition/min.json
index ae71fba049dbe..74e3fd8208f1b 100644
--- a/docs/reference/esql/functions/kibana/definition/min.json
+++ b/docs/reference/esql/functions/kibana/definition/min.json
@@ -28,6 +28,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : ""
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json
index 4767b35ec7cac..90ace2525f710 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_count.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_count.json
@@ -52,6 +52,18 @@
       "variadic" : false,
       "returnType" : "integer"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "integer"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
index bfca58bc3e140..ce2c96dbc1757 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json
@@ -53,6 +53,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json
index a2b6358023e4b..552f568c9b171 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_first.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_first.json
@@ -52,6 +52,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json
index b6dc268af5305..78d7b348a6042 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_last.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_last.json
@@ -52,6 +52,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json
index 27d2b010dc02c..a1e55c58cff70 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_max.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_max.json
@@ -28,6 +28,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json
index 410e97335687f..7998ca4eda94e 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_min.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_min.json
@@ -28,6 +28,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json
index dbbfe0ffb5a78..df4d48145fac6 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_slice.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json
@@ -100,6 +100,30 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression. If `null`, the function returns `null`."
+        },
+        {
+          "name" : "start",
+          "type" : "integer",
+          "optional" : false,
+          "description" : "Start position. If `null`, the function returns `null`. The start argument can be negative. An index of -1 is used to specify the last value in the list."
+        },
+        {
+          "name" : "end",
+          "type" : "integer",
+          "optional" : true,
+          "description" : "End position(included). Optional; if omitted, the position at `start` is returned. The end argument can be negative. An index of -1 is used to specify the last value in the list."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json
index 4cb255fb0afcb..072c05743af33 100644
--- a/docs/reference/esql/functions/kibana/definition/mv_sort.json
+++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json
@@ -40,6 +40,24 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Multivalue expression. If `null`, the function returns `null`."
+        },
+        {
+          "name" : "order",
+          "type" : "keyword",
+          "optional" : true,
+          "description" : "Sort order. The valid options are ASC and DESC, the default is ASC."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/not_equals.json b/docs/reference/esql/functions/kibana/definition/not_equals.json
index 69389d4c8d077..24f31115cbc37 100644
--- a/docs/reference/esql/functions/kibana/definition/not_equals.json
+++ b/docs/reference/esql/functions/kibana/definition/not_equals.json
@@ -77,6 +77,24 @@
       "variadic" : false,
       "returnType" : "boolean"
     },
+    {
+      "params" : [
+        {
+          "name" : "lhs",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "An expression."
+        },
+        {
+          "name" : "rhs",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "An expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json
index 9823c3cff8923..76473349a3414 100644
--- a/docs/reference/esql/functions/kibana/definition/qstr.json
+++ b/docs/reference/esql/functions/kibana/definition/qstr.json
@@ -30,7 +30,7 @@
     }
   ],
   "examples" : [
-    "from books \n| where qstr(\"author: Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;"
+    "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
  ],
   "preview" : true,
   "snapshot_only" : false
diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
index 07ffe84444f02..d9409bceb8e6f 100644
--- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
+++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json
@@ -4,7 +4,92 @@
   "name" : "to_date_nanos",
   "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).",
   "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.",
-  "signatures" : [ ],
+  "signatures" : [
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "double",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "keyword",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "long",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "text",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "unsigned_long",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    }
+  ],
   "preview" : true,
   "snapshot_only" : false
 }
diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json
index 072aa66aac669..8f9ecbd139d32 100644
--- a/docs/reference/esql/functions/kibana/definition/to_datetime.json
+++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json
@@ -17,6 +17,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json
index afd6de001bbc6..eb1ce7220c3f9 100644
--- a/docs/reference/esql/functions/kibana/definition/to_long.json
+++ b/docs/reference/esql/functions/kibana/definition/to_long.json
@@ -52,6 +52,18 @@
       "variadic" : false,
       "returnType" : "long"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "long"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json
index 33e95d5bed1c2..1c86e81b31136 100644
--- a/docs/reference/esql/functions/kibana/definition/to_string.json
+++ b/docs/reference/esql/functions/kibana/definition/to_string.json
@@ -52,6 +52,18 @@
       "variadic" : false,
       "returnType" : "keyword"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "Input value. The input can be a single- or multi-valued column or an expression."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "keyword"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json
index ae69febd4f755..95ac402bb242a 100644
--- a/docs/reference/esql/functions/kibana/definition/values.json
+++ b/docs/reference/esql/functions/kibana/definition/values.json
@@ -28,6 +28,18 @@
       "variadic" : false,
       "returnType" : "date"
     },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : ""
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "date_nanos"
+    },
     {
       "params" : [
         {
diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md
index 3c06662982bbf..b866637b41b85 100644
--- a/docs/reference/esql/functions/kibana/docs/match.md
+++ b/docs/reference/esql/functions/kibana/docs/match.md
@@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 Performs a match query on the specified field. Returns true if the provided query matches the row.
 
 ```
-from books 
-| where match(author, "Faulkner")
-| keep book_no, author 
-| sort book_no 
-| limit 5;
+FROM books 
+| WHERE MATCH(author, "Faulkner")
+| KEEP book_no, author 
+| SORT book_no 
+| LIMIT 5;
 ```
diff --git a/docs/reference/esql/functions/kibana/docs/qstr.md b/docs/reference/esql/functions/kibana/docs/qstr.md
index 37b5777623185..9b5dc3f9a22eb 100644
--- a/docs/reference/esql/functions/kibana/docs/qstr.md
+++ b/docs/reference/esql/functions/kibana/docs/qstr.md
@@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 Performs a query string query. Returns true if the provided query string matches the row.
 
 ```
-from books 
-| where qstr("author: Faulkner")
-| keep book_no, author 
-| sort book_no 
-| limit 5;
+FROM books 
+| WHERE QSTR("author: Faulkner")
+| KEEP book_no, author 
+| SORT book_no 
+| LIMIT 5;
 ```
diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc
index c6fb6a091e9d0..9e6915a37fc14 100644
--- a/docs/reference/esql/functions/types/case.asciidoc
+++ b/docs/reference/esql/functions/types/case.asciidoc
@@ -13,6 +13,8 @@ boolean | cartesian_shape | cartesian_shape | cartesian_shape
 boolean | cartesian_shape | | cartesian_shape
 boolean | date | date | date
 boolean | date | | date
+boolean | date_nanos | date_nanos | date_nanos
+boolean | date_nanos | | date_nanos
 boolean | double | double | double
 boolean | double | | double
 boolean | geo_point | geo_point | geo_point
diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc
index 23a249494e0a2..b6479dc7ff86a 100644
--- a/docs/reference/esql/functions/types/coalesce.asciidoc
+++ b/docs/reference/esql/functions/types/coalesce.asciidoc
@@ -10,6 +10,7 @@ boolean | | boolean
 cartesian_point | cartesian_point | cartesian_point
 cartesian_shape | cartesian_shape | cartesian_shape
 date | date | date
+date_nanos | date_nanos | date_nanos
 geo_point | geo_point | geo_point
 geo_shape | geo_shape | geo_shape
 integer | integer | integer
diff --git a/docs/reference/esql/functions/types/count_distinct.asciidoc b/docs/reference/esql/functions/types/count_distinct.asciidoc
index c365c8814573c..f5758a8914d20 100644
--- a/docs/reference/esql/functions/types/count_distinct.asciidoc
+++ b/docs/reference/esql/functions/types/count_distinct.asciidoc
@@ -13,6 +13,10 @@ date | integer | long
 date | long | long
 date | unsigned_long | long
 date | | long
+date_nanos | integer | long
+date_nanos | long | long
+date_nanos | unsigned_long | long
+date_nanos | | long
 double | integer | long
 double | long | long
 double | unsigned_long | long
diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc
index aa7dee99c6c44..c610f9119e51c 100644
--- a/docs/reference/esql/functions/types/date_trunc.asciidoc
+++ b/docs/reference/esql/functions/types/date_trunc.asciidoc
@@ -6,5 +6,7 @@
 |===
 interval | date | result
 date_period | date | date
+date_period | date_nanos | date_nanos
 time_duration | date | date
+time_duration | date_nanos | date_nanos
 |===
diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc
index ad0e46ef4b8da..8d48b7ebf084a 100644
--- a/docs/reference/esql/functions/types/equals.asciidoc
+++ b/docs/reference/esql/functions/types/equals.asciidoc
@@ -9,6 +9,7 @@ boolean | boolean | boolean
 cartesian_point | cartesian_point | boolean
 cartesian_shape | cartesian_shape | boolean
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc
index c506328126a94..8000fd34c8507 100644
--- a/docs/reference/esql/functions/types/greater_than.asciidoc
+++ b/docs/reference/esql/functions/types/greater_than.asciidoc
@@ -6,6 +6,7 @@
 |===
 lhs | rhs | result
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc
index c506328126a94..8000fd34c8507 100644
--- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc
+++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc
@@ -6,6 +6,7 @@
 |===
 lhs | rhs | result
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc
index 7df77a6991315..0bc11b569d426 100644
--- a/docs/reference/esql/functions/types/greatest.asciidoc
+++ b/docs/reference/esql/functions/types/greatest.asciidoc
@@ -8,6 +8,7 @@ first | rest | result
 boolean | boolean | boolean
 boolean | | boolean
 date | date | date
+date_nanos | date_nanos | date_nanos
 double | double | double
 integer | integer | integer
 integer | | integer
diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc
index 7df77a6991315..0bc11b569d426 100644
--- a/docs/reference/esql/functions/types/least.asciidoc
+++ b/docs/reference/esql/functions/types/least.asciidoc
@@ -8,6 +8,7 @@ first | rest | result
 boolean | boolean | boolean
 boolean | | boolean
 date | date | date
+date_nanos | date_nanos | date_nanos
 double | double | double
 integer | integer | integer
 integer | | integer
diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc
index c506328126a94..8000fd34c8507 100644
--- a/docs/reference/esql/functions/types/less_than.asciidoc
+++ b/docs/reference/esql/functions/types/less_than.asciidoc
@@ -6,6 +6,7 @@
 |===
 lhs | rhs | result
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc
index c506328126a94..8000fd34c8507 100644
--- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc
+++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc
@@ -6,6 +6,7 @@
 |===
 lhs | rhs | result
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/max.asciidoc b/docs/reference/esql/functions/types/max.asciidoc
index 564fb8dc3bfb0..adf457dac31b8 100644
--- a/docs/reference/esql/functions/types/max.asciidoc
+++ b/docs/reference/esql/functions/types/max.asciidoc
@@ -7,6 +7,7 @@
 field | result
 boolean | boolean
 date | date
+date_nanos | date_nanos
 double | double
 integer | integer
 ip | ip
diff --git a/docs/reference/esql/functions/types/min.asciidoc b/docs/reference/esql/functions/types/min.asciidoc
index 564fb8dc3bfb0..adf457dac31b8 100644
--- a/docs/reference/esql/functions/types/min.asciidoc
+++ b/docs/reference/esql/functions/types/min.asciidoc
@@ -7,6 +7,7 @@
 field | result
 boolean | boolean
 date | date
+date_nanos | date_nanos
 double | double
 integer | integer
 ip | ip
diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc
index 260c531731f04..c58c4eda44396 100644
--- a/docs/reference/esql/functions/types/mv_count.asciidoc
+++ b/docs/reference/esql/functions/types/mv_count.asciidoc
@@ -9,6 +9,7 @@ boolean | integer
 cartesian_point | integer
 cartesian_shape | integer
 date | integer
+date_nanos | integer
 double | integer
 geo_point | integer
 geo_shape | integer
diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc
index 976de79bb0910..1524ec86cd5ec 100644
--- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc
+++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc
@@ -9,6 +9,7 @@ boolean | boolean
 cartesian_point | cartesian_point
 cartesian_shape | cartesian_shape
 date | date
+date_nanos | date_nanos
 double | double
 geo_point | geo_point
 geo_shape | geo_shape
diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc
index 47736e76d1db4..e68af2f992b43 100644
--- a/docs/reference/esql/functions/types/mv_first.asciidoc
+++ b/docs/reference/esql/functions/types/mv_first.asciidoc
@@ -9,6 +9,7 @@ boolean | boolean
 cartesian_point | cartesian_point
 cartesian_shape | cartesian_shape
 date | date
+date_nanos | date_nanos
 double | double
 geo_point | geo_point
 geo_shape | geo_shape
diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc
index 47736e76d1db4..e68af2f992b43 100644
--- a/docs/reference/esql/functions/types/mv_last.asciidoc
+++ b/docs/reference/esql/functions/types/mv_last.asciidoc
@@ -9,6 +9,7 @@ boolean | boolean
 cartesian_point | cartesian_point
 cartesian_shape | cartesian_shape
 date | date
+date_nanos | date_nanos
 double | double
 geo_point | geo_point
 geo_shape | geo_shape
diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc
index d4e014554c86c..ffba14489b97c 100644
--- a/docs/reference/esql/functions/types/mv_max.asciidoc
+++ b/docs/reference/esql/functions/types/mv_max.asciidoc
@@ -7,6 +7,7 @@
 field | result
 boolean | boolean
 date | date
+date_nanos | date_nanos
 double | double
 integer | integer
 ip | ip
diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc
index d4e014554c86c..ffba14489b97c 100644
--- a/docs/reference/esql/functions/types/mv_min.asciidoc
+++ b/docs/reference/esql/functions/types/mv_min.asciidoc
@@ -7,6 +7,7 @@
 field | result
 boolean | boolean
 date | date
+date_nanos | date_nanos
 double | double
 integer | integer
 ip | ip
diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc
index 60c1f6315a599..75f45e333ee0c 100644
--- a/docs/reference/esql/functions/types/mv_slice.asciidoc
+++ b/docs/reference/esql/functions/types/mv_slice.asciidoc
@@ -9,6 +9,7 @@ boolean | integer | integer | boolean
 cartesian_point | integer | integer | cartesian_point
 cartesian_shape | integer | integer | cartesian_shape
 date | integer | integer | date
+date_nanos | integer | integer | date_nanos
 double | integer | integer | double
 geo_point | integer | integer | geo_point
 geo_shape | integer | integer | geo_shape
diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc
index c21ea5983945e..83d3e45c7be02 100644
--- a/docs/reference/esql/functions/types/mv_sort.asciidoc
+++ b/docs/reference/esql/functions/types/mv_sort.asciidoc
@@ -7,6 +7,7 @@
 field | order | result
 boolean | keyword | boolean
 date | keyword | date
+date_nanos | keyword | date_nanos
 double | keyword | double
 integer | keyword | integer
 ip | keyword | ip
diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc
index ad0e46ef4b8da..8d48b7ebf084a 100644
--- a/docs/reference/esql/functions/types/not_equals.asciidoc
+++ b/docs/reference/esql/functions/types/not_equals.asciidoc
@@ -9,6 +9,7 @@ boolean | boolean | boolean
 cartesian_point | cartesian_point | boolean
 cartesian_shape | cartesian_shape | boolean
 date | date | boolean
+date_nanos | date_nanos | boolean
 double | double | boolean
 double | integer | boolean
 double | long | boolean
diff --git a/docs/reference/esql/functions/types/to_date_nanos.asciidoc b/docs/reference/esql/functions/types/to_date_nanos.asciidoc
index 1f50b65f25a77..dec6833d14b08 100644
--- a/docs/reference/esql/functions/types/to_date_nanos.asciidoc
+++ b/docs/reference/esql/functions/types/to_date_nanos.asciidoc
@@ -5,5 +5,11 @@
 [%header.monospaced.styled,format=dsv,separator=|]
 |===
 field | result
-date_nanos
+date | date_nanos
+date_nanos | date_nanos
+double | date_nanos
+keyword | date_nanos
+long | date_nanos
+text | date_nanos
+unsigned_long | date_nanos
 |===
diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc
index 80c986efca794..118ed6c09c11c 100644
--- a/docs/reference/esql/functions/types/to_datetime.asciidoc
+++ b/docs/reference/esql/functions/types/to_datetime.asciidoc
@@ -6,6 +6,7 @@
 |===
 field | result
 date | date
+date_nanos | date
 double | date
 integer | date
 keyword | date
diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc
index a07990cb1cfbf..1009543c1bbde 100644
--- a/docs/reference/esql/functions/types/to_long.asciidoc
+++ b/docs/reference/esql/functions/types/to_long.asciidoc
@@ -9,6 +9,7 @@ boolean | long
 counter_integer | long
 counter_long | long
 date | long
+date_nanos | long
 double | long
 integer | long
 keyword | long
diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc
index 26a5b31a2a589..9d4188214b3d8 100644
--- a/docs/reference/esql/functions/types/to_string.asciidoc
+++ b/docs/reference/esql/functions/types/to_string.asciidoc
@@ -9,6 +9,7 @@ boolean | keyword
 cartesian_point | keyword
 cartesian_shape | keyword
 date | keyword
+date_nanos | keyword
 double | keyword
 geo_point | keyword
 geo_shape | keyword
diff --git a/docs/reference/esql/functions/types/values.asciidoc b/docs/reference/esql/functions/types/values.asciidoc
index 564fb8dc3bfb0..adf457dac31b8 100644
--- a/docs/reference/esql/functions/types/values.asciidoc
+++ b/docs/reference/esql/functions/types/values.asciidoc
@@ -7,6 +7,7 @@
 field | result
 boolean | boolean
 date | date
+date_nanos | date_nanos
 double | double
 integer | integer
 ip | ip
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java
index d84a471815a9a..61b480968e974 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.plugins.Plugin;
 
 public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin {
 
-    public static final FeatureFlag DATE_NANOS_FEATURE_FLAG = new FeatureFlag("esql_date_nanos");
     public static final FeatureFlag SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text");
 }
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
index e980b1509813e..1c65dd386667f 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java
@@ -209,7 +209,6 @@ public enum DataType {
      * check that sending them to a function produces a sane error message.
      */
    public static final Map<DataType, FeatureFlag> UNDER_CONSTRUCTION = Map.ofEntries(
-        Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
         Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG)
     );
 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index 2a62216072e9c..ab3a7e3e7d0b8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -322,32 +322,32 @@ public enum Cap {
         /**
          * Support for nanosecond dates as a data type
          */
-        DATE_NANOS_TYPE(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        DATE_NANOS_TYPE(),
 
         /**
         * Support for to_date_nanos function
         */
-        TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        TO_DATE_NANOS(),
 
        /**
         * Support for date nanos type in binary comparisons
         */
-        DATE_NANOS_BINARY_COMPARISON(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        DATE_NANOS_BINARY_COMPARISON(),
 
        /**
         * Support Least and Greatest functions on Date Nanos type
         */
-        LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        LEAST_GREATEST_FOR_DATENANOS(),
 
        /**
         * Support for date_trunc function on date nanos type
         */
-        DATE_TRUNC_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        DATE_TRUNC_DATE_NANOS(),
 
        /**
         * support aggregations on date nanos
         */
-        DATE_NANOS_AGGREGATIONS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG),
+        DATE_NANOS_AGGREGATIONS(),
 
        /**
         * Support for datetime in least and greatest functions
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
index ca02441d2e1ad..eafb1fdbcbdcb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
@@ -501,7 +501,9 @@ private static DataType getTargetType(String[] names) {
                 types.add(type);
             }
         }
+
         return types.stream()
+            .filter(DATA_TYPE_CASTING_PRIORITY::containsKey)
            .min((dt1, dt2) -> DATA_TYPE_CASTING_PRIORITY.get(dt1).compareTo(DATA_TYPE_CASTING_PRIORITY.get(dt2)))
            .orElse(UNSUPPORTED);
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java
index ac2d4ff3cbc43..2165c3c7ad1a0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java
@@ -55,7 +55,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp
     );
 
     @FunctionInfo(
-        returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" },
+        returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" },
         description = "The maximum value of a field.",
         isAggregation = true,
         examples = {
@@ -72,7 +72,7 @@ public Max(
         Source source,
         @Param(
             name = "field",
-            type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }
+            type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" }
         ) Expression field
     ) {
         this(source, field, Literal.TRUE);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java
index a5fc8196847b7..7d67868dd4134 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java
@@ -55,7 +55,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp
     );
 
     @FunctionInfo(
-        returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" },
+        returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" },
         description = "The minimum value of a field.",
         isAggregation = true,
         examples = {
@@ -72,7 +72,7 @@ public Min(
         Source source,
         @Param(
             name = "field",
-            type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" }
+            type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" }
         ) Expression field
     ) {
         this(source, field, Literal.TRUE);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java
index 111eab051719b..e7df990b20422 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java
@@ -52,7 +52,7 @@ public class Values extends AggregateFunction implements ToAggregator {
     );
 
     @FunctionInfo(
-        returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "version" },
+        returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" },
         preview = true,
         description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. "
             + "If you need the values returned in order use <<esql-mv_sort>>.",
@@ -70,7 +70,10 @@ public class Values extends AggregateFunction implements ToAggregator {
     )
     public Values(
         Source source,
-        @Param(name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression v
+        @Param(
+            name = "field",
+            type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" }
+        ) Expression v
     ) {
         this(source, v, Literal.TRUE);
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java
index 6e38d72500840..a35b67d7ac3fd 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java
@@ -62,7 +62,7 @@ public interface DateTruncFactoryProvider {
     protected static final ZoneId DEFAULT_TZ = ZoneOffset.UTC;
 
     @FunctionInfo(
-        returnType = "date",
+        returnType = { "date", "date_nanos" },
         description = "Rounds down a date to the closest interval.",
         examples = {
             @Example(file = "date", tag = "docsDateTrunc"),
@@ -83,7 +83,7 @@ public DateTrunc(
             type = { "date_period", "time_duration" },
             description = "Interval; expressed using the timespan literal syntax."
         ) Expression interval,
-        @Param(name = "date", type = { "date" }, description = "Date expression") Expression field
+        @Param(name = "date", type = { "date", "date_nanos" }, description = "Date expression") Expression field
     ) {
         super(source, List.of(interval, field));
         this.interval = interval;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
index 56ade3982e0d8..3ae7bd93092ef 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
@@ -53,12 +53,12 @@ public LessThan(
         Source source,
         @Param(
             name = "lhs",
-            type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
+            type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
             description = "An expression."
         ) Expression left,
         @Param(
             name = "rhs",
-            type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
+            type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
             description = "An expression."
         ) Expression right
     ) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
index 7fe67707a7976..775ca45bfa124 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
@@ -262,6 +262,42 @@ public static List<TypedDataSupplier> dateCases(int minRows, int maxRows) {
         return cases;
     }
 
+    /**
+     *
+     * Generate cases for {@link DataType#DATE_NANOS}.
+     *
+     */
+    public static List<TypedDataSupplier> dateNanosCases(int minRows, int maxRows) {
+        List<TypedDataSupplier> cases = new ArrayList<>();
+        addSuppliers(cases, minRows, maxRows, "<1970-01-01T00:00:00.000000000Z>", DataType.DATE_NANOS, () -> 0L);
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)
+        );
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE)
+        );
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE)
+        );
+
+        return cases;
+    }
+
     public static List<TypedDataSupplier> booleanCases(int minRows, int maxRows) {
         List<TypedDataSupplier> cases = new ArrayList<>();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java
index 5e23083d7c810..fff2d824fc710 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java
@@ -52,6 +52,7 @@ public static Iterable<Object[]> parameters() {
             MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true),
             MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true),
             MultiRowTestCaseSupplier.dateCases(1, 1000),
+            MultiRowTestCaseSupplier.dateNanosCases(1, 1000),
             MultiRowTestCaseSupplier.booleanCases(1, 1000),
             MultiRowTestCaseSupplier.ipCases(1, 1000),
             MultiRowTestCaseSupplier.versionCases(1, 1000),
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java
index 9756804a1ec0f..7d4b46f2a902a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java
@@ -90,6 +90,15 @@ public static Iterable<Object[]> parameters() {
                 equalTo(200L)
             )
         ),
+        new TestCaseSupplier(
+            List.of(DataType.DATE_NANOS),
+            () -> new TestCaseSupplier.TestCase(
+                List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")),
+                "Max[field=Attribute[channel=0]]",
+                DataType.DATE_NANOS,
+                equalTo(200L)
+            )
+        ),
         new TestCaseSupplier(
             List.of(DataType.BOOLEAN),
             () -> new TestCaseSupplier.TestCase(
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java
index 171181496c889..58ef8d86017a8 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java
@@ -90,6 +90,15 @@ public static Iterable<Object[]> parameters() {
                 equalTo(200L)
             )
         ),
+        new TestCaseSupplier(
+            List.of(DataType.DATE_NANOS),
+            () -> new TestCaseSupplier.TestCase(
+                List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")),
+                "Min[field=Attribute[channel=0]]",
+                DataType.DATE_NANOS,
+                equalTo(200L)
+            )
+        ),
         new TestCaseSupplier(
             List.of(DataType.BOOLEAN),
             () -> new TestCaseSupplier.TestCase(
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java
index 55320543d0ec3..29faceee7497e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java
@@ -45,6 +45,7 @@ public static Iterable<Object[]> parameters() {
             MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true),
             MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true),
             MultiRowTestCaseSupplier.dateCases(1, 1000),
+            MultiRowTestCaseSupplier.dateNanosCases(1, 1000),
             MultiRowTestCaseSupplier.booleanCases(1, 1000),
             MultiRowTestCaseSupplier.ipCases(1, 1000),
             MultiRowTestCaseSupplier.versionCases(1, 1000),
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
index 65f5653f27e1a..11894cf5b847b 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.geometry.Geometry;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.util.NumericUtils;
@@ -397,9 +396,6 @@ protected static void dateNanos(
         DataType expectedDataType,
         BiFunction<Integer, List<Long>, Matcher<Object>> matcher
     ) {
-        if (EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG.isEnabled() == false) {
-            return;
-        }
         cases.add(
             new TestCaseSupplier(
                 name + "(epoch nanos)",
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
index e8f9f26a76f43..0d114b4964920 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
@@ -113,8 +113,8 @@ public static Iterable<Object[]> parameters() {
                 "rhs",
                 (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(),
                 DataType.BOOLEAN,
-                TestCaseSupplier.dateCases(),
-                TestCaseSupplier.dateCases(),
+                TestCaseSupplier.dateNanosCases(),
+                TestCaseSupplier.dateNanosCases(),
                 List.of(),
                 false
             )

From dea1e7dfa7ae598879a7d04233b9665d55c32497 Mon Sep 17 00:00:00 2001
From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com>
Date: Wed, 20 Nov 2024 15:34:19 +0100
Subject: [PATCH 094/386] Propagate scoring function through random sampler
 (#116957)

* Propagate scoring function through random sampler.

* Update docs/changelog/116957.yaml

* Correct score mode in random sampler weight

* Fix random sampling with scores and p=1.0

* Unit test with scores

* YAML test

* Add capability
---
 docs/changelog/116957.yaml                    |  5 ++
 modules/aggregations/build.gradle             |  2 +-
 .../test/aggregations/random_sampler.yml      | 60 +++++++++++++++++++
 .../action/search/SearchCapabilities.java     |  3 +
 .../search/aggregations/AggregatorBase.java   |  2 +-
 .../random/RandomSamplerAggregator.java       | 53 ++++++++++++----
 .../RandomSamplerAggregatorFactory.java       | 42 +------------
 .../random/RandomSamplerAggregatorTests.java  | 38 ++++++++++++
 8 files changed, 150 insertions(+), 55 deletions(-)
 create mode 100644 docs/changelog/116957.yaml

diff --git a/docs/changelog/116957.yaml b/docs/changelog/116957.yaml
new file mode 100644
index 0000000000000..1020190de180d
--- /dev/null
+++ b/docs/changelog/116957.yaml
@@ -0,0 +1,5 @@
+pr: 116957
+summary: Propagate scoring function through random sampler
+area: Machine Learning
+type: bug
+issues: [ 110134 ]
diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle
index 5df0a890af753..2835180904620 100644
--- a/modules/aggregations/build.gradle
+++ b/modules/aggregations/build.gradle
@@ -20,7 +20,7 @@ esplugin {
 restResources {
   restApi {
-    include '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script'
+    include 'capabilities', '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script'
   }
   restTests {
     // Pulls in all aggregation tests from core AND the forwards v7's core for forwards compatibility
 }
diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml
index 5b2c2dc379cb9..4d8efe2a6f9d8 100644
--- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml
+++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml
@@ -142,6 +142,66 @@ setup:
     - match: { aggregations.sampled.mean.value: 1.0 }
 ---
+"Test random_sampler aggregation with scored subagg":
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_search
+          capabilities: [ random_sampler_with_scored_subaggs ]
+      test_runner_features: capabilities
+      reason: "Support for random sampler with scored subaggs capability required"
+  - do:
+      search:
+        index: data
+        size: 0
+        body: >
+          {
+            "query": {
+              "function_score": {
+                "random_score": {}
+              }
+            },
+            "aggs": {
+              "sampled": {
+                "random_sampler": {
+                  "probability": 0.5
+                },
+                "aggs": {
+                  "top": {
+                    "top_hits": {}
+                  }
+                }
+              }
+            }
+          }
+  - is_true: aggregations.sampled.top.hits
+  - do:
+      search:
+        index: data
+        size: 0
+        body: >
+          {
+            "query": {
+              "function_score": {
+                "random_score": {}
+              }
+            },
+            "aggs": {
+              "sampled": {
+                "random_sampler": {
+                  "probability": 1.0
+                },
+                "aggs": {
+                  "top": {
+                    "top_hits": {}
+                  }
+                }
+              }
+            }
+          }
+  - match: { aggregations.sampled.top.hits.total.value: 6 }
+  - is_true: aggregations.sampled.top.hits.hits.0._score
+---
 "Test random_sampler aggregation with poor settings":
   - requires:
       cluster_features: ["gte_v8.2.0"]
diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
index 7b57481ad5716..241f30b367782 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java
@@ -41,6 +41,8 @@ private SearchCapabilities() {}
     /** Support multi-dense-vector script field access. */
     private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access";
 
+    private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs";
+
     public static final Set<String> CAPABILITIES;
     static {
         HashSet<String> capabilities = new HashSet<>();
@@ -50,6 +52,7 @@ private SearchCapabilities() {}
         capabilities.add(DENSE_VECTOR_DOCVALUE_FIELDS);
         capabilities.add(TRANSFORM_RANK_RRF_TO_RETRIEVER);
         capabilities.add(NESTED_RETRIEVER_INNER_HITS_SUPPORT);
+        capabilities.add(RANDOM_SAMPLER_WITH_SCORED_SUBAGGS);
         if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) {
             capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER);
             capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
index bf9116207b375..1ea7769b33384 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java
@@ -40,7 +40,7 @@ public abstract class AggregatorBase extends Aggregator {
 
     protected final String name;
     protected final Aggregator parent;
-    private final AggregationContext context;
+    protected final AggregationContext context;
     private final Map<String, Object> metadata;
 
     protected final Aggregator[] subAggregators;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
index 921cbb96385ad..699b8c6b5d500 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java
@@ -9,12 +9,15 @@
 
 package org.elasticsearch.search.aggregations.bucket.sampler.random;
 
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.CollectionTerminatedException;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
-import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -22,6 +25,7 @@
 import org.elasticsearch.search.aggregations.CardinalityUpperBound;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
 import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
 import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,14 +38,13 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single
     private final int seed;
     private final Integer shardSeed;
     private final double probability;
-    private final CheckedSupplier<Weight, IOException> weightSupplier;
+    private Weight weight;
 
     RandomSamplerAggregator(
         String name,
         int seed,
         Integer shardSeed,
         double probability,
-        CheckedSupplier<Weight, IOException> weightSupplier,
         AggregatorFactories factories,
         AggregationContext context,
         Aggregator parent,
@@ -56,10 +59,33 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single
                 RandomSamplerAggregationBuilder.NAME + " aggregation [" + name + "] must have sub aggregations configured"
             );
         }
-        this.weightSupplier = weightSupplier;
         this.shardSeed = shardSeed;
     }
 
+    /**
+     * This creates the query weight which will be used in the aggregator.
+     *
+     * This weight is a boolean query between {@link RandomSamplingQuery} and the configured top level query of the search. This allows
+     * the aggregation to iterate the documents directly, thus sampling in the background instead of the foreground.
+     * @return weight to be used, is cached for additional usages
+     * @throws IOException when building the weight or queries fails;
+     */
+    private Weight getWeight() throws IOException {
+        if (weight == null) {
+            ScoreMode scoreMode = scoreMode();
+            BooleanQuery.Builder fullQuery = new BooleanQuery.Builder().add(
+                context.query(),
+                scoreMode.needsScores() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER
+            );
+            if (probability < 1.0) {
+                Query sampleQuery = new RandomSamplingQuery(probability, seed, shardSeed == null ? context.shardRandomSeed() : shardSeed);
+                fullQuery.add(sampleQuery, BooleanClause.Occur.FILTER);
+            }
+            weight = context.searcher().createWeight(context.searcher().rewrite(fullQuery.build()), scoreMode, 1f);
+        }
+        return weight;
+    }
+
     @Override
     public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
@@ -101,22 +127,26 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt
         if (sub.isNoop()) {
             return LeafBucketCollector.NO_OP_COLLECTOR;
         }
+
+        Scorer scorer = getWeight().scorer(aggCtx.getLeafReaderContext());
+        // This means there are no docs to iterate, possibly due to the fields not existing
+        if (scorer == null) {
+            return LeafBucketCollector.NO_OP_COLLECTOR;
+        }
+        sub.setScorer(scorer);
+
         // No sampling is being done, collect all docs
+        // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353
         if (probability >= 1.0) {
             grow(1);
-            return new LeafBucketCollector() {
+            return new LeafBucketCollectorBase(sub, null) {
                 @Override
                 public void collect(int doc, long owningBucketOrd) throws IOException {
                     collectExistingBucket(sub, doc, 0);
                 }
             };
         }
-        // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353
-        Scorer scorer = weightSupplier.get().scorer(aggCtx.getLeafReaderContext());
-        // This means there are no docs to iterate, possibly due to the fields not existing
-        if (scorer == null) {
-            return LeafBucketCollector.NO_OP_COLLECTOR;
-        }
+
         final DocIdSetIterator docIt = scorer.iterator();
         final Bits liveDocs = aggCtx.getLeafReaderContext().reader().getLiveDocs();
         try {
@@ -136,5 +166,4 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
         // Since we have done our own collection, there is nothing for the leaf collector to do
         return LeafBucketCollector.NO_OP_COLLECTOR;
     }
-
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java
index 67c958046dac7..50921501896d3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java
@@ -9,10 +9,6 @@
 
 package org.elasticsearch.search.aggregations.bucket.sampler.random;
 
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.search.Weight;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -30,7 +26,6 @@ public class RandomSamplerAggregatorFactory extends AggregatorFactory {
     private final Integer shardSeed;
     private final double probability;
     private final SamplingContext samplingContext;
-    private Weight weight;
 
     RandomSamplerAggregatorFactory(
         String name,
@@ -57,41 +52,6 @@ public Optional<SamplingContext> getSamplingContext() {
     @Override
     public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
         throws IOException {
-        return new RandomSamplerAggregator(
-            name,
-            seed,
-            shardSeed,
-            probability,
-            this::getWeight,
-            factories,
-            context,
-            parent,
-            cardinality,
-            metadata
-        );
+        return new RandomSamplerAggregator(name, seed, shardSeed, probability, factories, context, parent, cardinality, metadata);
     }
-
-    /**
-     * This creates the query weight which will be used in the aggregator.
-     *
-     * This weight is a boolean query between {@link RandomSamplingQuery} and the configured top level query of the search. This allows
-     * the aggregation to iterate the documents directly, thus sampling in the background instead of the foreground.
-     * @return weight to be used, is cached for additional usages
-     * @throws IOException when building the weight or queries fails;
-     */
-    private Weight getWeight() throws IOException {
-        if (weight == null) {
-            RandomSamplingQuery query = new RandomSamplingQuery(
-                probability,
-                seed,
-                shardSeed == null ? context.shardRandomSeed() : shardSeed
-            );
-            BooleanQuery booleanQuery = new BooleanQuery.Builder().add(query, BooleanClause.Occur.FILTER)
-                .add(context.query(), BooleanClause.Occur.FILTER)
-                .build();
-            weight = context.searcher().createWeight(context.searcher().rewrite(booleanQuery), ScoreMode.COMPLETE_NO_SCORES, 1f);
-        }
-        return weight;
-    }
-
 }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java
index f75f9f474c8e8..2f51a5a09a8ac 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java
@@ -11,22 +11,29 @@
 
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
 import org.elasticsearch.search.aggregations.metrics.Avg;
 import org.elasticsearch.search.aggregations.metrics.Max;
 import org.elasticsearch.search.aggregations.metrics.Min;
+import org.elasticsearch.search.aggregations.metrics.TopHits;
 import org.hamcrest.Description;
 import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeMatcher;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.DoubleStream;
@@ -37,6 +44,8 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.notANumber;
@@ -76,6 +85,35 @@ public void testAggregationSampling() throws IOException {
         assertThat(avgAvg, closeTo(1.5, 0.5));
     }
 
+    public void testAggregationSampling_withScores() throws IOException
{ + long[] counts = new long[5]; + AtomicInteger integer = new AtomicInteger(); + do { + testCase(RandomSamplerAggregatorTests::writeTestDocs, (InternalRandomSampler result) -> { + counts[integer.get()] = result.getDocCount(); + if (result.getDocCount() > 0) { + TopHits agg = result.getAggregations().get("top"); + List hits = Arrays.asList(agg.getHits().getHits()); + assertThat(Strings.toString(result), hits, hasSize(1)); + assertThat(Strings.toString(result), hits.get(0).getScore(), allOf(greaterThan(0.0f), lessThan(1.0f))); + } + }, + new AggTestConfig( + new RandomSamplerAggregationBuilder("my_agg").subAggregation(AggregationBuilders.topHits("top").size(1)) + .setProbability(0.25), + longField(NUMERIC_FIELD_NAME) + ).withQuery( + new BooleanQuery.Builder().add( + new TermQuery(new Term(KEYWORD_FIELD_NAME, KEYWORD_FIELD_VALUE)), + BooleanClause.Occur.SHOULD + ).build() + ) + ); + } while (integer.incrementAndGet() < 5); + long avgCount = LongStream.of(counts).sum() / integer.get(); + assertThat(avgCount, allOf(greaterThanOrEqualTo(20L), lessThanOrEqualTo(70L))); + } + public void testAggregationSamplingNestedAggsScaled() throws IOException { // in case 0 docs get sampled, which can rarely happen // in case the test index has many segments. From 20166ae425a7d4bb715f6c1f3c157f4037e9479c Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:40:23 +0200 Subject: [PATCH 095/386] Unmute `Failed to snapshot indices with synthetic source` (#117117) --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index cc568144698ee..20cea26eb4be7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -239,9 +239,6 @@ tests: - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116990 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} - issue: https://github.com/elastic/elasticsearch/issues/117082 - class: org.elasticsearch.discovery.ClusterDisruptionIT method: testAckedIndexing issue: https://github.com/elastic/elasticsearch/issues/117024 From d5bc38958617516511b68de8995e57e824c0da95 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 20 Nov 2024 09:51:23 -0500 Subject: [PATCH 096/386] ESQL: Fix invariant test in TOP(bytes) (#117049) Fixes a self-test in the code for `TOP(bytes)`, specifically around the merging used for grouping by ordinals. --- .../data/sort/BytesRefBucketedSort.java | 2 +- .../data/sort/BucketedSortTestCase.java | 46 +++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 9198de53b1e04..6dca94b9bc79a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -147,7 +147,7 @@ public void merge(int bucket, BytesRefBucketedSort other, int otherBucket) { // The value was never collected. 
            return;
         }
-        other.checkInvariant(bucket);
+        other.checkInvariant(otherBucket);
         long otherStart = other.startIndex(otherBucket, otherRootIndex);
         long otherEnd = other.common.endIndex(otherRootIndex);
         // TODO: This can be improved for heapified buckets by making use of the heap structures
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
index f857f50b2d30f..339c2bba2a734 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java
@@ -363,6 +363,52 @@ public final void testMergeEmptyToEmpty() {
         }
     }
 
+    public final void testMergeOtherBigger() {
+        try (T sort = build(SortOrder.ASC, 3)) {
+            var values = threeSortedValues();
+
+            collect(sort, values.get(0), 0);
+            collect(sort, values.get(1), 0);
+            collect(sort, values.get(2), 0);
+
+            try (T other = build(SortOrder.ASC, 3)) {
+                collect(other, values.get(0), 0);
+                collect(other, values.get(1), 1);
+                collect(other, values.get(2), 2);
+
+                merge(sort, 0, other, 0);
+                merge(sort, 0, other, 1);
+                merge(sort, 0, other, 2);
+            }
+
+            assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1)));
+        }
+    }
+
+    public final void testMergeThisBigger() {
+        try (T sort = build(SortOrder.ASC, 3)) {
+            var values = threeSortedValues();
+
+            collect(sort, values.get(0), 0);
+            collect(sort, values.get(1), 1);
+            collect(sort, values.get(2), 2);
+
+            try (T other = build(SortOrder.ASC, 3)) {
+                collect(other, values.get(0), 0);
+                collect(other, values.get(1), 0);
+                collect(other, values.get(2), 0);
+
+                merge(sort, 0, other, 0);
+                merge(sort, 1, other, 0);
+                merge(sort, 2, other, 0);
+            }
+
+            assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1)));
+            assertBlock(sort, 1, List.of(values.get(0), values.get(1), values.get(1)));
+            assertBlock(sort, 2, values);
+        }
+    }
+
     protected void assertBlock(T sort, int groupId, List values) {
         var blockFactory = TestBlockFactory.getNonBreakingInstance();

From 537bba6787ee43fc691e8bb2a586a51f46090bae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?=
Date: Wed, 20 Nov 2024 16:20:44 +0100
Subject: [PATCH 097/386] Disable KqlQueryBuilderTests on non-snapshot builds.
(#117025) --- muted-tests.yml | 2 -- x-pack/plugin/kql/build.gradle | 8 -------- .../xpack/kql/query/KqlQueryBuilderTests.java | 6 ++++++ 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 20cea26eb4be7..a7d8995253dd8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -180,8 +180,6 @@ tests: - class: org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT method: testILMDownsampleRollingRestart issue: https://github.com/elastic/elasticsearch/issues/114233 -- class: org.elasticsearch.xpack.kql.query.KqlQueryBuilderTests - issue: https://github.com/elastic/elasticsearch/issues/116487 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testInvalidJSON issue: https://github.com/elastic/elasticsearch/issues/116521 diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 054011a458fe0..79f2c91114bd9 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' @@ -28,12 +26,6 @@ dependencies { tasks.named('yamlRestTest').configure { usesDefaultDistribution() - - /**************************************************************** - * Enable QA/rest integration tests for snapshot builds only * - * TODO: Enable for all builds upon this feature release * - ****************************************************************/ - enabled = buildParams.isSnapshotBuild() } /********************************** diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java index 2bc23c7d457dd..7323f7d6d1a4e 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.kql.query; import org.apache.lucene.search.Query; +import org.elasticsearch.Build; import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -21,6 +22,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xpack.kql.KqlPlugin; import org.hamcrest.Matchers; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collection; @@ -34,6 +36,10 @@ import static org.hamcrest.Matchers.nullValue; public class KqlQueryBuilderTests extends AbstractQueryTestCase { + @BeforeClass + protected static void ensureSnapshotBuild() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + } @Override protected Collection> getPlugins() { From 21c4431d450e67096d8c8dc3f44f36e49b8ab0d9 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 20 Nov 2024 17:23:02 +0200 Subject: [PATCH 098/386] Unmute org.elasticsearch.upgrades.IndexingIT (#117155) --- muted-tests.yml | 36 +----------------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a7d8995253dd8..2216873a3a265 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -246,41 +246,7 @@ tests: - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT method: test {yaml=/10_apm/Test template 
reinstallation} issue: https://github.com/elastic/elasticsearch/issues/116445 -- class: org.elasticsearch.upgrades.DownsampleIT - method: testRollupIndex {upgradedNodes=3} - issue: https://github.com/elastic/elasticsearch/issues/117122 -- class: org.elasticsearch.upgrades.DownsampleIT - method: testRollupIndex {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/117123 -- class: org.elasticsearch.upgrades.DownsampleIT - method: testRollupIndex {upgradedNodes=2} - issue: https://github.com/elastic/elasticsearch/issues/117124 -- class: org.elasticsearch.upgrades.IndexingIT - method: testAutoIdWithOpTypeCreate {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/117125 -- class: org.elasticsearch.upgrades.IndexingIT - method: testTsdb {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/117126 -- class: org.elasticsearch.upgrades.IndexingIT - method: testIndexing {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/117127 -- class: org.elasticsearch.upgrades.IndexingIT - method: testSyntheticSource {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/117128 -- class: org.elasticsearch.upgrades.IndexingIT - method: testIndexing {upgradedNodes=3} - issue: https://github.com/elastic/elasticsearch/issues/117135 -- class: org.elasticsearch.upgrades.IndexingIT - method: testTsdb {upgradedNodes=3} - issue: https://github.com/elastic/elasticsearch/issues/117136 -- class: org.elasticsearch.upgrades.IndexingIT - method: testIndexing {upgradedNodes=2} - issue: https://github.com/elastic/elasticsearch/issues/117137 -- class: org.elasticsearch.upgrades.IndexingIT - method: testTsdb {upgradedNodes=2} - issue: https://github.com/elastic/elasticsearch/issues/117138 -- class: org.elasticsearch.upgrades.IndexingIT - issue: https://github.com/elastic/elasticsearch/issues/117140 + # Examples: # From e5209f9b22b2dd4ea9504e26a3d2173141b494a1 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 20 Nov 2024 16:50:07 +0100 Subject: [PATCH 099/386] [Inference API] Expand RateLimiter docs (#117156) --- .../xpack/inference/common/RateLimiter.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java index bbc5082d45004..b74e473155aec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -28,6 +28,14 @@ * * By setting the accumulated tokens limit to a value greater than zero, it effectively allows bursts of traffic. If the accumulated * tokens limit is set to zero, it will force the acquiring thread to wait on each call. + * + * Example: + * Time unit: Second + * Tokens to produce per time unit: 10 + * Limit for tokens in bucket: 100 + * + * Tokens in bucket after n seconds (n second -> tokens in bucket): + * 1 sec -> 10 tokens, 2 sec -> 20 tokens, ... , 10 sec -> 100 tokens (bucket full), ... 
200 sec -> 100 tokens (no increase in tokens) */ public class RateLimiter { @@ -76,6 +84,7 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double throw new IllegalArgumentException(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE)); } + // If the new token limit is smaller than what we've accumulated already we need to drop tokens to meet the new token limit accumulatedTokens = Math.min(accumulatedTokens, newAccumulatedTokensLimit); accumulatedTokensLimit = newAccumulatedTokensLimit; @@ -88,7 +97,8 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double } /** - * Causes the thread to wait until the tokens are available + * Causes the thread to wait until the tokens are available. + * This reserves token in advance leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here * @throws InterruptedException _ */ @@ -130,6 +140,7 @@ private static void validateTokenRequest(int tokens) { /** * Returns the amount of time to wait for the tokens to become available. + * This reserves tokens in advance leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here. Must be greater than 0. * @return the amount of time to wait */ From c2c0901bbac39334c61e9677aa3ee4fb921ef549 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 20 Nov 2024 15:57:14 +0000 Subject: [PATCH 100/386] Remove desired node historical features (#116951) --- .../upgrades/DesiredNodesUpgradeIT.java | 96 +------------------ .../TransportUpdateDesiredNodesAction.java | 17 ---- .../UpdateDesiredNodesRequest.java | 7 -- .../cluster/metadata/DesiredNode.java | 6 -- .../cluster/metadata/MetadataFeatures.java | 12 --- .../cluster/metadata/DesiredNodeTests.java | 32 ------- ...toricalFeaturesMetadataExtractorTests.java | 3 +- 7 files changed, 5 insertions(+), 168 deletions(-) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index eb01d67432fe3..d9adec47ff483 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest; import org.elasticsearch.client.Request; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodeWithStatus; import org.elasticsearch.common.Strings; @@ -42,22 +41,7 @@ public DesiredNodesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { desiredNodesVersion = upgradedNodes + 1; } - private enum ProcessorsPrecision { - DOUBLE, - FLOAT - } - public void testUpgradeDesiredNodes() throws Exception { - if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) { - assertUpgradedNodesCanReadDesiredNodes(); - } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) { - assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent(); - } else { - assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions(); - } - } - - private void assertUpgradedNodesCanReadDesiredNodes() throws 
Exception { if (isMixedCluster() || isUpgradedCluster()) { final Map desiredNodes = getLatestDesiredNodes(); final String historyId = extractValue(desiredNodes, "history_id"); @@ -66,60 +50,10 @@ private void assertUpgradedNodesCanReadDesiredNodes() throws Exception { assertThat(version, is(equalTo(desiredNodesVersion - 1))); } - addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.DOUBLE); + addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion); assertAllDesiredNodesAreActualized(); } - private void assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent() throws Exception { - // We define the same set of desired nodes to ensure that they are equal across all - // the test runs, otherwise we cannot guarantee an idempotent update in this test - final var desiredNodes = getNodeNames().stream() - .map( - nodeName -> new DesiredNode( - Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - 1238.49922909, - ByteSizeValue.ofGb(32), - ByteSizeValue.ofGb(128), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() - ) - ) - .toList(); - - if (isMixedCluster()) { - updateDesiredNodes(desiredNodes, desiredNodesVersion - 1); - } - for (int i = 0; i < 2; i++) { - updateDesiredNodes(desiredNodes, desiredNodesVersion); - } - - final Map latestDesiredNodes = getLatestDesiredNodes(); - final int latestDesiredNodesVersion = extractValue(latestDesiredNodes, "version"); - assertThat(latestDesiredNodesVersion, is(equalTo(desiredNodesVersion))); - - if (isUpgradedCluster()) { - assertAllDesiredNodesAreActualized(); - } - } - - private void assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions() throws Exception { - if (isOldCluster()) { - addClusterNodesToDesiredNodesWithIntegerProcessors(1); - } else if (isMixedCluster()) { - // Processor ranges or float processors are forbidden during upgrades: 8.2 -> 8.3 clusters - final var responseException = expectThrows( - ResponseException.class, - () -> addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.FLOAT) - ); - final var statusCode = responseException.getResponse().getStatusLine().getStatusCode(); - assertThat(statusCode, is(equalTo(400))); - } else { - assertAllDesiredNodesAreActualized(); - addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(4, ProcessorsPrecision.FLOAT); - } - - getLatestDesiredNodes(); - } - private Map getLatestDesiredNodes() throws IOException { final var getDesiredNodesRequest = new Request("GET", "/_internal/desired_nodes/_latest"); final var response = client().performRequest(getDesiredNodesRequest); @@ -140,15 +74,14 @@ private void assertAllDesiredNodesAreActualized() throws Exception { } } - private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version, ProcessorsPrecision processorsPrecision) - throws Exception { + private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version) throws Exception { final List nodes; if (randomBoolean()) { nodes = getNodeNames().stream() .map( nodeName -> new DesiredNode( Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - processorsPrecision == ProcessorsPrecision.DOUBLE ? randomDoubleProcessorCount() : 0.5f, + randomDoubleProcessorCount(), ByteSizeValue.ofGb(randomIntBetween(10, 24)), ByteSizeValue.ofGb(randomIntBetween(128, 256)), clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? 
null : Build.current().version() @@ -157,9 +90,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve .toList(); } else { nodes = getNodeNames().stream().map(nodeName -> { - double minProcessors = processorsPrecision == ProcessorsPrecision.DOUBLE - ? randomDoubleProcessorCount() - : randomFloatProcessorCount(); + double minProcessors = randomDoubleProcessorCount(); return new DesiredNode( Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)), @@ -172,21 +103,6 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve updateDesiredNodes(nodes, version); } - private void addClusterNodesToDesiredNodesWithIntegerProcessors(int version) throws Exception { - final var nodes = getNodeNames().stream() - .map( - nodeName -> new DesiredNode( - Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(), - randomIntBetween(1, 24), - ByteSizeValue.ofGb(randomIntBetween(10, 24)), - ByteSizeValue.ofGb(randomIntBetween(128, 256)), - clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version() - ) - ) - .toList(); - updateDesiredNodes(nodes, version); - } - private void updateDesiredNodes(List nodes, int version) throws IOException { final var request = new Request("PUT", "/_internal/desired_nodes/upgrade_test/" + version); try (var builder = JsonXContent.contentBuilder()) { @@ -223,10 +139,6 @@ private double randomDoubleProcessorCount() { return randomDoubleBetween(0.5, 512.1234, true); } - private float randomFloatProcessorCount() { - return randomIntBetween(1, 512) + randomFloat(); - } - @SuppressWarnings("unchecked") private static T extractValue(Map map, String path) { return (T) XContentMapValues.extractValue(path, map); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index bed2815f5a895..a0948af88e2f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.desirednodes.VersionConflictException; -import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodes; import org.elasticsearch.cluster.metadata.DesiredNodesMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -99,22 +98,6 @@ protected void masterOperation( ); } - @Override - protected void doExecute(Task task, UpdateDesiredNodesRequest request, ActionListener listener) { - if (request.clusterHasRequiredFeatures(nf -> featureService.clusterHasFeature(clusterService.state(), nf)) == false) { - listener.onFailure( - new IllegalArgumentException( - "Unable to use processor ranges, floating-point (with greater precision) processors " - + "in mixed-clusters with nodes that do not support feature " - + DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED.id() - ) - ); - return; - } - - super.doExecute(task, request, listener); - } - static ClusterState replaceDesiredNodes(ClusterState clusterState, DesiredNodes newDesiredNodes) { return 
clusterState.copyAndUpdateMetadata( metadata -> metadata.putCustom(DesiredNodesMetadata.TYPE, new DesiredNodesMetadata(newDesiredNodes)) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index a94401fdd66f3..21b714b105b59 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -26,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.function.Predicate; public class UpdateDesiredNodesRequest extends AcknowledgedRequest { private static final TransportVersion DRY_RUN_VERSION = TransportVersions.V_8_4_0; @@ -117,11 +115,6 @@ public boolean isDryRun() { return dryRun; } - public boolean clusterHasRequiredFeatures(Predicate clusterHasFeature) { - return clusterHasFeature.test(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED) - || nodes.stream().allMatch(n -> n.clusterHasRequiredFeatures(clusterHasFeature)); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index fb8559b19d81d..de3343c1944c1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -37,7 +37,6 @@ import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.function.Predicate; import java.util.regex.Pattern; import static java.lang.String.format; @@ -48,7 +47,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparable { public static final NodeFeature RANGE_FLOAT_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.range_float_processors"); - public static final NodeFeature DOUBLE_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.double_processors"); public static final NodeFeature DESIRED_NODE_VERSION_DEPRECATED = new NodeFeature("desired_node.version_deprecated"); public static final TransportVersion RANGE_FLOAT_PROCESSORS_SUPPORT_TRANSPORT_VERSION = TransportVersions.V_8_3_0; @@ -348,10 +346,6 @@ public Set getRoles() { return roles; } - public boolean clusterHasRequiredFeatures(Predicate clusterHasFeature) { - return (processorsRange == null && processors.hasDecimals() == false) || clusterHasFeature.test(RANGE_FLOAT_PROCESSORS_SUPPORTED); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java index 89f776a7ada0f..49bd38330e3af 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java @@ -9,24 +9,12 @@ package 
org.elasticsearch.cluster.metadata; -import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import java.util.Map; import java.util.Set; public class MetadataFeatures implements FeatureSpecification { - @Override - public Map getHistoricalFeatures() { - return Map.of( - DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED, - Version.V_8_3_0, - DesiredNode.DOUBLE_PROCESSORS_SUPPORTED, - Version.V_8_5_0 - ); - } - @Override public Set getFeatures() { return Set.of(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java index 41651d52ceb9f..0e4b8271ceac7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java @@ -185,38 +185,6 @@ public void testNodeCPUsRoundUp() { } } - public void testDesiredNodeHasRangeFloatProcessors() { - final var settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); - - { - final var desiredNode = new DesiredNode( - settings, - new DesiredNode.ProcessorsRange(0.4, 1.2), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode( - settings, - randomIntBetween(0, 10) + randomDoubleBetween(0.00001, 0.99999, true), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode(settings, 2.0f, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1)); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(true)); - } - } - public void testEqualsOrProcessorsCloseTo() { final Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); final double maxDelta = 1E-3; diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java index e230982073699..d810f17ae552e 100644 --- a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java @@ -27,7 +27,6 @@ import java.util.Set; import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; -import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; @@ -48,7 +47,7 @@ public void testExtractHistoricalMetadata() throws IOException { nodeFeatureVersionMap.putAll(historical); featureNamesSet.addAll(names); }); - assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + // 
assertThat(nodeFeatureVersionMap, not(anEmptyMap()));
         assertThat(featureNamesSet, not(empty()));
         assertThat(featureNamesSet, hasItem("test_features_enabled"));

From 9477bd691aeee7518a7ec7557c391e3bca5c5cad Mon Sep 17 00:00:00 2001
From: Stanislav Malyshev
Date: Wed, 20 Nov 2024 09:15:12 -0700
Subject: [PATCH 101/386] Fix long metric deserialize & add - auto-resize needs to be set manually (#117105)

* Fix long metric deserialize & add - auto-resize needs to be set manually

---
 docs/changelog/117105.yaml                  | 6 +++++
 .../admin/cluster/stats/LongMetric.java     | 1 +
 .../stats/CCSTelemetrySnapshotTests.java    | 24 ++++++++++++++++++-
 3 files changed, 30 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/117105.yaml

diff --git a/docs/changelog/117105.yaml b/docs/changelog/117105.yaml
new file mode 100644
index 0000000000000..de56c4d521a62
--- /dev/null
+++ b/docs/changelog/117105.yaml
@@ -0,0 +1,6 @@
+pr: 117105
+summary: Fix long metric deserialize & add - auto-resize needs to be set manually
+area: CCS
+type: bug
+issues:
+ - 116914

diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
index 737e83d4b30a1..07d9c11ae4c07 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java
@@ -74,6 +74,7 @@ public static LongMetricValue fromStream(StreamInput in) throws IOException {
         try {
             // TODO: not sure what is the good value for minBarForHighestToLowestValueRatio here?
             Histogram dh = Histogram.decodeFromCompressedByteBuffer(bb, 1);
+            dh.setAutoResize(true);
             return new LongMetricValue(dh);
         } catch (DataFormatException e) {
             throw new IOException(e);

diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java
index e9188d9cb8f0d..a72630c327ea2 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.action.admin.cluster.stats.CCSTelemetrySnapshot.PerClusterCCSTelemetry;
 import org.elasticsearch.action.admin.cluster.stats.LongMetric.LongMetricValue;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Tuple;
@@ -32,9 +33,13 @@ public class CCSTelemetrySnapshotTests extends AbstractWireSerializingTestCase<CCSTelemetrySnapshot> {
 
     private LongMetricValue randomLongMetricValue() {
+        return randomLongMetricValueBetween(0, 1_000_000);
+    }
+
+    private LongMetricValue randomLongMetricValueBetween(int low, int high) {
         LongMetric v = new LongMetric();
         for (int i = 0; i < randomIntBetween(5, 10); i++) {
-            v.record(randomIntBetween(0, 1_000_000));
+            v.record(randomIntBetween(low, high));
         }
         return v.getValue();
     }
@@ -330,4 +335,21 @@ private String readJSONFromResource(String fileName) throws IOException {
             return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
         }
     }
+
+    public void testRanges() throws IOException {
+        var value1 = randomLongMetricValueBetween(1_000_000, 10_000_000);
+        var count1 = value1.count();
+        var max1 = value1.max();
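+        // round-trip both values through the wire format; fromStream re-enables auto-resize
+        // on the decoded histogram so the add() below can widen the smaller histogram's range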
+        var output = new BytesStreamOutput();
+        value1.writeTo(output);
+        var value1Read = LongMetricValue.fromStream(output.bytes().streamInput());
+        var value2 = randomLongMetricValueBetween(0, 100);
+        var count2 = value2.count();
+        output = new BytesStreamOutput();
+        value2.writeTo(output);
+        var value2Read = LongMetricValue.fromStream(output.bytes().streamInput());
+        value2Read.add(value1Read);
+        assertThat(value2Read.count(), equalTo(count1 + count2));
+        assertThat(value2Read.max(), equalTo(max1));
+    }
 }

From ccdc5627788123860fbfe9b812e0f6db0c27e0da Mon Sep 17 00:00:00 2001
From: Sam Xiao
Date: Wed, 20 Nov 2024 11:30:52 -0500
Subject: [PATCH 102/386] Azure Fixture: Add additional batch delete path (#116985)

Co-authored-by: Elastic Machine
---
 .../src/main/java/fixture/azure/AzureHttpHandler.java | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
index 92ce04b6bea5b..bbcfe1f75dc06 100644
--- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
+++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
@@ -355,6 +355,14 @@ public void handle(final HttpExchange exchange) throws IOException {
                         throw new IllegalStateException("Got multiple deletes in a single request?");
                     }
                     toDelete = blobName;
+                } else if (Regex.simpleMatch("DELETE /" + account + "/" + container + "/*", line)) {
+                    // possible alternative DELETE url, depending on which method is used in the batch client
+                    String path = RestUtils.decodeComponent(line.split("(\\s|\\?)")[1]);
+                    String blobName = path.split(account)[1];
+                    if (toDelete != null) {
+                        throw new IllegalStateException("Got multiple deletes in a single request?");
+                    }
+                    toDelete = blobName;
                 }
             }
             response.append("--").append(responseBoundary).append("--\r\n0\r\n");

From 312f8315d35a640ed43673de75dad61b65481391 Mon Sep 17 00:00:00 2001
From: Nikolaj Volgushev
Date: Wed, 20 Nov 2024 17:43:01 +0100
Subject: [PATCH 103/386] Longer RCS suite timeout due to slow keystore (#117157)

Rather than muting the suite and losing signal, bump the suite timeout
to account for very slow keystore operations. We should follow this up
with performance improvements around keystore setup in tests.
Closes: https://github.com/elastic/elasticsearch/issues/116883 --- .../RemoteClusterSecurityReloadCredentialsRestIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java index 42982e6183613..fb941e9e815cf 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.nullValue; // account for slow stored secure settings updates (involves removing and re-creating the keystore) -@TimeoutSuite(millis = 10 * TimeUnits.MINUTE) +@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) public class RemoteClusterSecurityReloadCredentialsRestIT extends AbstractRemoteClusterSecurityTestCase { private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); From fe7818af04b564d222faa1c8ff166d01e59a9671 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 20 Nov 2024 10:27:44 -0800 Subject: [PATCH 104/386] Deprecate _source.mode in mappings (#117172) Re-introduce #116689 --- .../compat/RestCompatTestTransformTask.java | 2 +- docs/changelog/116689.yaml | 10 ++ .../LogsIndexModeFullClusterRestartIT.java | 16 +--- .../LogsIndexModeRollingUpgradeIT.java | 20 +--- rest-api-spec/build.gradle | 6 ++ .../rest-api-spec/test/logsdb/10_settings.yml | 5 - .../test/logsdb/20_source_mapping.yml | 15 ++- .../rest-api-spec/test/tsdb/20_mapping.yml | 11 --- .../index/IndexSettingProvider.java | 3 +- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/SourceFieldMapper.java | 92 ++++++++++++------- .../elasticsearch/node/NodeConstruction.java | 2 +- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 4 +- .../query/SearchExecutionContextTests.java | 2 +- .../test/rest/ESRestTestCase.java | 35 ++++++- .../test/rest/yaml/section/DoSection.java | 3 + .../xpack/ccr/FollowIndexIT.java | 8 +- .../esql/qa/rest/FieldExtractorTestCase.java | 7 +- .../xpack/logsdb/LogsDBPlugin.java | 9 +- .../SyntheticSourceIndexSettingsProvider.java | 8 +- ...heticSourceIndexSettingsProviderTests.java | 6 +- .../test/40_source_mode_setting.yml | 29 +----- 23 files changed, 162 insertions(+), 134 deletions(-) create mode 100644 docs/changelog/116689.yaml diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index ef93dafa913cd..ba242a8e23861 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -137,7 +137,7 @@ public void skipTest(String fullTestName, String reason) { // However, the folder can be arbitrarily nest so, a == a1/a2/a3, and the test name can include forward slashes, so c == c1/c2/c3 // So we also need to support 
a1/a2/a3/b/c1/c2/c3 - String[] testParts = fullTestName.split("/"); + String[] testParts = fullTestName.split("/", 3); if (testParts.length < 3) { throw new IllegalArgumentException( "To skip tests, all 3 parts [folder/file/test name] must be defined. found [" + fullTestName + "]" diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml new file mode 100644 index 0000000000000..0b1d1646868aa --- /dev/null +++ b/docs/changelog/116689.yaml @@ -0,0 +1,10 @@ +pr: 116689 +summary: Deprecate `_source.mode` in mappings +area: Mapping +type: deprecation +issues: [] +deprecation: + title: Deprecate `_source.mode` in mappings + area: Mapping + details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. + impact: Use `index.mapping.source.mode` index setting instead diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index 3459a29e98649..9866d94dccc3c 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { @ClassRule @@ -169,22 +165,16 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 1, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.equalTo("logsdb")); } } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java index 8c369ebc9950d..1eb7cbd3f70c2 100644 --- 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.hamcrest.Matcher; @@ -30,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { @ClassRule() @@ -160,14 +156,10 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 3, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.nullValue()); + assertIndexSettings(2, Matchers.nullValue()); + assertIndexSettings(3, Matchers.equalTo("logsdb")); } } @@ -183,13 +175,11 @@ static void enableLogsdbByDefault() throws IOException { assertOK(client().performRequest(request)); } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 439960228cef6..650d17e41de7f 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -60,4 +60,10 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") + task.skipTest("tsdb/20_mapping/stored source is supported", "no longer serialize source_mode") + task.skipTest("tsdb/20_mapping/Synthetic source", "no longer serialize source_mode") + task.skipTest("logsdb/10_settings/create logs index", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/synthetic _source is 
default", "no longer serialize source_mode") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index d0f89b1b8b6cb..463df7d2ab1bb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -76,11 +76,6 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logsdb" } - - do: - indices.get_mapping: - index: test - - match: { test.mappings._source.mode: synthetic } - --- using default timestamp field mapping: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 27146557bb1be..06a007b8aaca5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -13,10 +13,10 @@ synthetic _source is default: index: mode: logsdb - do: - indices.get: + indices.get_settings: index: test-default-source - - - match: { test-default-source.mappings._source.mode: "synthetic" } + - match: { test-default-source.settings.index.mode: logsdb } + - match: { test-default-source.settings.index.mapping.source.mode: null } --- stored _source mode is supported: @@ -28,11 +28,12 @@ stored _source mode is supported: index: mode: logsdb mapping.source.mode: stored + - do: - indices.get: + indices.get_settings: index: test-stored-source - - - match: { test-stored-source.mappings._source.mode: "stored" } + - match: { test-stored-source.settings.index.mode: logsdb } + - match: { test-stored-source.settings.index.mapping.source.mode: stored } --- disabled _source is not supported: @@ -110,7 +111,6 @@ include/exclude is supported with stored _source: indices.get: index: test-includes - - match: { test-includes.mappings._source.mode: "stored" } - match: { test-includes.mappings._source.includes: ["a"] } - do: @@ -129,5 +129,4 @@ include/exclude is supported with stored _source: indices.get: index: test-excludes - - match: { test-excludes.mappings._source.mode: "stored" } - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 4d8f03a6e5e18..9fe3f5e0b7272 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -450,11 +450,6 @@ nested fields: type: long time_series_metric: gauge - - do: - indices.get_mapping: {} - - - match: {tsdb-synthetic.mappings._source.mode: synthetic} - --- stored source is supported: - requires: @@ -486,12 +481,6 @@ stored source is supported: type: keyword time_series_dimension: true - - do: - indices.get: - index: tsdb_index - - - match: { tsdb_index.mappings._source.mode: "stored" } - --- disabled source is not supported: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index 6a553d5dc5440..8c997a9766baa 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ 
b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; @@ -54,7 +55,7 @@ Settings getAdditionalIndexSettings( /** * Infrastructure class that holds services that can be used by {@link IndexSettingProvider} instances. */ - record Parameters(CheckedFunction mapperServiceFactory) { + record Parameters(ClusterService clusterService, CheckedFunction mapperServiceFactory) { } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5746bea12a2d8..7a5f469a57fa1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -134,6 +134,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd25cd6eb80a3..e5b12f748543f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -68,6 +70,9 @@ public class SourceFieldMapper extends MetadataFieldMapper { return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); + public static final String DEPRECATION_WARNING = "Configuring source mode in mappings is deprecated and will be removed " + + "in future versions. 
Use [index.mapping.source.mode] index setting instead."; + /** The source mode */ public enum Mode { DISABLED, @@ -79,28 +84,32 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper STORED = new SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper DISABLED = new SourceFieldMapper( Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); public static class Defaults { @@ -134,16 +143,7 @@ public static class Builder extends MetadataFieldMapper.Builder { * The default mode for TimeSeries is left empty on purpose, so that mapping printings include the synthetic * source mode. */ - private final Parameter mode = new Parameter<>( - "mode", - true, - () -> null, - (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), - m -> toType(m).enabled.explicit() ? null : toType(m).mode, - (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), - v -> v.toString().toLowerCase(Locale.ROOT) - ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) - .setSerializerCheck((includeDefaults, isConfigured, value) -> value != null); // don't emit if `enabled` is configured + private final Parameter mode; private final Parameter> includes = Parameter.stringArrayParam( "includes", false, @@ -158,15 +158,28 @@ public static class Builder extends MetadataFieldMapper.Builder { private final Settings settings; private final IndexMode indexMode; + private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.serializeMode = serializeMode; + this.mode = new Parameter<>( + "mode", + true, + () -> null, + (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), + m -> toType(m).enabled.explicit() ? 
null : toType(m).mode, + (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), + v -> v.toString().toLowerCase(Locale.ROOT) + ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) + // don't emit if `enabled` is configured + .setSerializerCheck((includeDefaults, isConfigured, value) -> serializeMode && value != null); } public Builder setSynthetic() { @@ -219,21 +232,22 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - - SourceFieldMapper sourceFieldMapper; - if (isDefault()) { + if (mode.isConfigured()) { + serializeMode = true; + } + final SourceFieldMapper sourceFieldMapper; + if (isDefault() && sourceMode == null) { // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. - if (sourceMode == null) { - sourceFieldMapper = DEFAULT; - } else { - sourceFieldMapper = resolveStaticInstance(sourceMode); - } + sourceFieldMapper = DEFAULT; + } else if (isDefault() && serializeMode == false && sourceMode != null) { + sourceFieldMapper = resolveStaticInstance(sourceMode); } else { sourceFieldMapper = new SourceFieldMapper( sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY) + excludes.getValue().toArray(Strings.EMPTY_ARRAY), + serializeMode ); } if (indexMode != null) { @@ -283,15 +297,29 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - - return resolveStaticInstance(settingSourceMode); + if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { + return resolveStaticInstance(settingSourceMode); + } else { + return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) ) - ); + ) { + @Override + public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) + throws MapperParsingException { + assert name.equals(SourceFieldMapper.NAME) : name; + if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { + deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); + } + return super.parse(name, node, parserContext); + } + }; static final class SourceFieldType extends MappedFieldType { private final boolean enabled; @@ -330,8 +358,9 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { } } - // nullable for bwc reasons + // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; + private final boolean serializeMode; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -341,7 +370,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; 
private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -349,6 +378,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.includes = includes; this.excludes = excludes; this.complete = stored() && sourceFilter == null; + this.serializeMode = serializeMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -419,7 +449,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 842fa773ce330..caf65c05cf27d 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -820,7 +820,7 @@ private void construct( .searchOperationListeners(searchOperationListeners) .build(); - final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); + final var parameters = new IndexSettingProvider.Parameters(clusterService, indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( Sets.union( builtinIndexSettingProviders(), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 399740e6200e6..d4d0e67ff4141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index df6d9380fd141..d7f33b9cdb3ba 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -247,14 +247,14 @@ public void testSyntheticSourceInTimeSeries() throws IOException { }); DocumentMapper mapper = createTimeSeriesModeDocumentMapper(mapping); 
assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSyntheticSourceWithLogsIndexMode() throws IOException { XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); DocumentMapper mapper = createLogsModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSupportsNonDefaultParameterValues() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index fdc18264e2299..dc70c44a89128 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index c20aded9280fc..dd08107bd67fb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -69,6 +69,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -112,6 +113,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -1827,8 +1829,9 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); + } else if (isSyntheticSourceConfiguredInMapping(mapping)) { + request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); } - final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1872,6 +1875,27 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } + @SuppressWarnings("unchecked") + protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { + if (mapping == 
null) { + return false; + } + var mappings = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + mapping.trim().startsWith("{") ? mapping : '{' + mapping + '}', + false + ); + if (mappings.containsKey("_doc")) { + mappings = (Map) mappings.get("_doc"); + } + Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); + if (sourceMapper == null) { + return false; + } + Object mode = sourceMapper.get("mode"); + return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); + } + protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); @@ -2269,7 +2293,7 @@ protected static Map> getClusterStateFeatures(RestClient adm */ protected static IndexVersion minimumIndexVersion() throws IOException { final Request request = new Request("GET", "_nodes"); - request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version"); + request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version,nodes.*.index_version"); final Response response = adminClient().performRequest(request); final Map nodes = ObjectPath.createFromResponse(response).evaluate("nodes"); @@ -2277,10 +2301,13 @@ protected static IndexVersion minimumIndexVersion() throws IOException { IndexVersion minVersion = null; for (Map.Entry node : nodes.entrySet()) { Map nodeData = (Map) node.getValue(); - String versionStr = (String) nodeData.get("max_index_version"); + Object versionStr = nodeData.get("index_version"); + if (versionStr == null) { + versionStr = nodeData.get("max_index_version"); + } // fallback on version if index version is not there IndexVersion indexVersion = versionStr != null - ? IndexVersion.fromId(Integer.parseInt(versionStr)) + ? 
IndexVersion.fromId(Integer.parseInt(versionStr.toString())) : IndexVersion.fromId( parseLegacyVersion((String) nodeData.get("version")).map(Version::id).orElse(IndexVersions.MINIMUM_COMPATIBLE.id()) ); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 8243dcdc9de94..627554f6b261d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; @@ -495,6 +496,8 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } + unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); + if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 53e068ae6126e..0bb4afe51b85a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; @@ -366,8 +367,10 @@ public void testSyntheticSource() throws Exception { final String leaderIndexName = "synthetic_leader"; if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); - createIndex(adminClient(), leaderIndexName, Settings.EMPTY, """ - "_source": {"mode": "synthetic"}, + Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + createIndex(adminClient(), leaderIndexName, settings, """ "properties": {"kwd": {"type": "keyword"}}}""", null); for (int i = 0; i < numDocs; i++) { logger.info("Indexing doc [{}]", i); @@ -392,7 +395,6 @@ public void testSyntheticSource() throws Exception { } assertBusy(() -> { verifyDocuments(client(), followIndexName, numDocs); - assertMap(getIndexMappingAsMap(followIndexName), matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))); if (overrideNumberOfReplicas) { assertMap(getIndexSettingsAsMap(followIndexName), matchesMap().extraOk().entry("index.number_of_replicas", "0")); } else { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index d124fdb5755c3..6f45c9d92fd12 100644 --- 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.BlockLoader; @@ -1456,16 +1457,12 @@ private static void index(String name, String... docs) throws IOException { } private static void createIndex(String name, CheckedConsumer mapping) throws IOException { - Request request = new Request("PUT", "/" + name); XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); - index.startObject("mappings"); mapping.accept(index); index.endObject(); - index.endObject(); String configStr = Strings.toString(index); logger.info("index: {} {}", name, configStr); - request.setJsonEntity(configStr); - client().performRequest(request); + ESRestTestCase.createIndex(name, Settings.EMPTY, configStr); } /** diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 93ba126e4196f..04d12fd51bae7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -67,10 +67,13 @@ public Collection getAdditionalIndexSettingProviders(Index if (DiscoveryNode.isStateless(settings)) { return List.of(logsdbIndexModeSettingsProvider); } - return List.of( - new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), - logsdbIndexModeSettingsProvider + var syntheticSettingProvider = new SyntheticSourceIndexSettingsProvider( + licenseService, + parameters.mapperServiceFactory(), + logsdbIndexModeSettingsProvider, + () -> parameters.clusterService().state().nodes().getMinSupportedIndexVersion() ); + return List.of(syntheticSettingProvider, logsdbIndexModeSettingsProvider); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index e87f10ec19916..1f38ecda19515 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.time.Instant; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -39,15 +40,18 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction mapperServiceFactory; private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; + private final Supplier createdIndexVersion; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, CheckedFunction mapperServiceFactory, - 
LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider, + Supplier createdIndexVersion ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; + this.createdIndexVersion = createdIndexVersion; } @Override @@ -148,7 +152,7 @@ private IndexMetadata buildIndexMetadataForMapperService( ); int shardReplicas = indexTemplateAndCreateRequestSettings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0); var finalResolvedSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_VERSION_CREATED, createdIndexVersion.get()) .put(indexTemplateAndCreateRequestSettings) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, dummyShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 2d8723a0d8c25..1f5d26eaedf34 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -54,7 +55,7 @@ public void setup() { provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, getLogsdbIndexModeSettingsProvider(false)); + }, getLogsdbIndexModeSettingsProvider(false), IndexVersion::current); newMapperServiceCounter.set(0); } @@ -336,7 +337,8 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(true) + getLogsdbIndexModeSettingsProvider(true), + IndexVersion::current ); final Settings settings = Settings.EMPTY; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 33fedce3b59c1..792df4dbf639e 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -470,13 +470,7 @@ create an index with time_series index mode and synthetic source: indices.get_settings: index: "test_time_series_index_mode_synthetic" - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } - - - - do: - indices.get_mapping: - index: test_time_series_index_mode_synthetic - - - match: { 
test_time_series_index_mode_synthetic.mappings._source.mode: synthetic }
+  - match: { test_time_series_index_mode_synthetic.settings.index.mapping.source.mode: synthetic }

 ---
 create an index with logsdb index mode and synthetic source:
@@ -493,12 +487,7 @@ create an index with logsdb index mode and synthetic source:
       indices.get_settings:
         index: "test_logsdb_index_mode_synthetic"
   - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb }
-
-  - do:
-      indices.get_mapping:
-        index: test_logsdb_index_mode_synthetic
-
-  - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic }
+  - match: { test_logsdb_index_mode_synthetic.settings.index.mapping.source.mode: synthetic }

 ---
 create an index with time_series index mode and stored source:
@@ -524,12 +513,7 @@ create an index with time_series index mode and stored source:
       indices.get_settings:
         index: "test_time_series_index_mode_undefined"
   - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series }
-
-  - do:
-      indices.get_mapping:
-        index: test_time_series_index_mode_undefined
-
-  - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored }
+  - match: { test_time_series_index_mode_undefined.settings.index.mapping.source.mode: stored }

 ---
 create an index with logsdb index mode and stored source:
@@ -546,12 +530,7 @@ create an index with logsdb index mode and stored source:
       indices.get_settings:
         index: "test_logsdb_index_mode_undefined"
   - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb }
-
-  - do:
-      indices.get_mapping:
-        index: test_logsdb_index_mode_undefined
-
-  - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored }
+  - match: { test_logsdb_index_mode_undefined.settings.index.mapping.source.mode: stored }

 ---
 create an index with time_series index mode and disabled source:

From 311412db2f7cbda3b225249cb5c4f3b436f90758 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 20 Nov 2024 14:40:47 -0500
Subject: [PATCH 105/386] ESQL: Fix sorts containing _source (#116980)

This fixes sorts containing a `_source` field. It can use the standard
encoder for `BytesRef`s. You can't sort *by* a `_source` field, but that
doesn't really make sense anyway.
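For context before the diff: ES|QL chooses a TopN encoder per output column, and the bug was that a `_source` column hit a throwing branch even when it was only being returned, never sorted on. The following is a minimal, self-contained sketch of that dispatch; the enum constants and class names are simplified stand-ins, not the real ES|QL types.

// Simplified model of the per-column encoder dispatch this patch fixes.
// Sortable types get an order-preserving encoder; opaque types such as
// _source can still flow through TopN output, just never as a sort key.
enum DataType { LONG, KEYWORD, GEO_POINT, SOURCE }

enum TopNEncoder { DEFAULT_SORTABLE, DEFAULT_UNSORTABLE }

class EncoderDispatch {
    static TopNEncoder encoderFor(DataType type) {
        return switch (type) {
            case LONG, KEYWORD -> TopNEncoder.DEFAULT_SORTABLE;
            // Before the fix, SOURCE threw here even when it was only a
            // returned column; now it is encoded like any other opaque value.
            case GEO_POINT, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE;
        };
    }

    public static void main(String[] args) {
        System.out.println(encoderFor(DataType.SOURCE)); // DEFAULT_UNSORTABLE
    }
}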
---
 docs/changelog/116980.yaml                    |  6 +++
 .../xpack/esql/action/EsqlCapabilities.java   |  6 +++
 .../esql/planner/LocalExecutionPlanner.java   |  3 +-
 .../rest-api-spec/test/esql/140_metadata.yml  | 41 ++++++++++++++++++-
 4 files changed, 53 insertions(+), 3 deletions(-)
 create mode 100644 docs/changelog/116980.yaml

diff --git a/docs/changelog/116980.yaml b/docs/changelog/116980.yaml
new file mode 100644
index 0000000000000..140324fd40b92
--- /dev/null
+++ b/docs/changelog/116980.yaml
@@ -0,0 +1,6 @@
+pr: 116980
+summary: "ESQL: Fix sorts containing `_source`"
+area: ES|QL
+type: bug
+issues:
+ - 116659

diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index ab3a7e3e7d0b8..4137d863e0f7e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -425,6 +425,12 @@ public enum Cap {
          */
         SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN,

+        /**
+         * Fix {@code SORT} when the {@code _source} field is not a sort key but
+         * is being returned.
+         */
+        SORT_RETURNING_SOURCE_OK,
+
         /**
          * Allow filter per individual aggregation.
          */

diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index c181f434368e0..1096c917fed4f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -362,11 +362,10 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte
             case VERSION -> TopNEncoder.VERSION;
             case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION,
                 OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE;
-            case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE ->
+            case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE ->
                 TopNEncoder.DEFAULT_UNSORTABLE;
             // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point
             case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED;
-            case SOURCE -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type());
         };
     }
     List orders = topNExec.order().stream().map(order -> {

diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml
index 83234901ae8f2..35cfbac5e3439 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml
@@ -170,4 +170,43 @@ setup:
       catch: /cannot sort on _source/
       esql.query:
         body:
-          query: 'FROM test metadata _source | sort _source'
+          query: 'FROM test metadata _source | SORT _source'
+
+---
+"sort returning _source is allowed":
+  - requires:
+      test_runner_features: [capabilities]
+      capabilities:
+        - method: POST
+          path: /_query
+          parameters: []
+          capabilities: [sort_returning_source_ok]
+      reason: "Sorts returning _source should be ok, but weren't in older versions"
+  - do:
+      esql.query:
+        body:
+          query: 'FROM test METADATA _source | SORT case ASC | KEEP case, _source | LIMIT 5'
+  - length: { columns: 2 }
+  - length: { values: 3 }
+  - match: {columns.0.name: "case"}
+  - match: {columns.0.type: "keyword"}
+  - match: {columns.1.name: "_source"}
+  - match: {columns.1.type: "_source"}
+  - match: {values.0.0: "all_ignored"}
+  - match: {values.0.1: {
+      "integer" : "not-an-integer",
+      "keyword" : "long-keyword",
+      "case" : "all_ignored"
+    }}
+  - match: {values.1.0: "integer_ignored"}
+  - match: {values.1.1: {
+      "integer" : "not-an-integer",
+      "keyword" : "ok",
+      "case" : "integer_ignored"
+    }}
+  - match: {values.2.0: "ok"}
+  - match: {values.2.1: {
+      "integer" : 10,
+      "keyword" : "ok",
+      "case" : "ok"
+    }}

From 770551498b2a6eb2fdb8b0c7da3fc3fc83609f95 Mon Sep 17 00:00:00 2001
From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com>
Date: Wed, 20 Nov 2024 15:08:44 -0500
Subject: [PATCH 106/386] [ML] Update Deberta tokenizer (#116358)

* Was using the byte position for the end offset, but it seems like using the
  char position is correct

* Update docs/changelog/116358.yaml

* Update UnigramTokenizer.java

---------

Co-authored-by: Elastic Machine
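The one-line fix in this patch swaps a byte-based end position for a char-based one. Byte and char positions only diverge on multi-byte UTF-8 input; the following self-contained snippet (not the tokenizer's real classes) shows the divergence that produced wrong end offsets.

import java.nio.charset.StandardCharsets;

// Why byte positions and char positions differ: multi-byte UTF-8 characters.
// Token offsets reported to consumers refer to positions in the input
// character sequence, so the char-based end position is the correct one.
public class ByteVsCharOffsets {
    public static void main(String[] args) {
        String text = "héllo"; // 'é' occupies 2 bytes in UTF-8 but 1 char
        int endChar = text.length();                                // 5
        int endByte = text.getBytes(StandardCharsets.UTF_8).length; // 6
        System.out.println("char end=" + endChar + ", byte end=" + endByte);
    }
}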
---
 docs/changelog/116358.yaml                                  | 5 +++++
 .../xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java | 4 +++-
 2 files changed, 8 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/116358.yaml

diff --git a/docs/changelog/116358.yaml b/docs/changelog/116358.yaml
new file mode 100644
index 0000000000000..58b44a1e9bcf5
--- /dev/null
+++ b/docs/changelog/116358.yaml
@@ -0,0 +1,5 @@
+pr: 116358
+summary: Update Deberta tokenizer
+area: Machine Learning
+type: bug
+issues: []

diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java
index 31deac066cba2..01821f5582471 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java
@@ -367,8 +367,10 @@ List<DelimitedToken.Encoded> tokenize(CharSequence inputSequence, IntToIntFuncti
                         new DelimitedToken.Encoded(
                             Strings.format("<0x%02X>", bytes[i]),
                             pieces[i],
+                            // even though we are changing the number of characters in the output, we don't
+                            // need to change the offsets. The offsets refer to the input characters
                             offsetCorrection.apply(node.startsAtCharPos),
-                            offsetCorrection.apply(startsAtBytes + i)
+                            offsetCorrection.apply(endsAtChars)
                         )
                     );
                 }

From 3c0a9750cb6bd8ca8ccf188c7a3ef3fa7fc151af Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 20 Nov 2024 15:19:32 -0500
Subject: [PATCH 107/386] ESQL: Test with a data node failure (#117164)

Adds a test that always fails on one of the data nodes and makes sure this
comes back as a failure. When we build support for partial results we can
use this test to simulate it.
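The test below builds its runtime-field mapping programmatically with XContentBuilder. For readability, the equivalent JSON is sketched here as a hypothetical Java text block; "fail" is the script language the test's plugin registers.

// Equivalent JSON for the runtime-field mapping the test assembles below;
// any document whose values are loaded through "fail_me" throws on read.
class FailMappingExample {
    static final String FAIL_MAPPING = """
        {
          "runtime": {
            "fail_me": {
              "type": "long",
              "script": { "source": "", "lang": "fail" }
            }
          }
        }
        """;
}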
---
 .../xpack/esql/action/EsqlNodeFailureIT.java  | 116 ++++++++++++++++++
 1 file changed, 116 insertions(+)
 create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java

diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java
new file mode 100644
index 0000000000000..3a69983a0d86e
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.CollectionUtils;
+import org.elasticsearch.index.mapper.OnScriptError;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.ScriptPlugin;
+import org.elasticsearch.script.LongFieldScript;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptEngine;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Make sure the failures on the data node come back as failures over the wire.
+ */
+@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
+public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase {
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class);
+    }
+
+    /**
+     * Use a runtime field that fails when loading field values to fail the entire query.
+     */
+    public void testFailureLoadingFields() throws IOException {
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("fail_me");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", "fail").endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get();
+
+        int docCount = 100;
+        List<IndexRequestBuilder> docs = new ArrayList<>(docCount);
+        for (int d = 0; d < docCount; d++) {
+            docs.add(client().prepareIndex("ok").setSource("foo", d));
+        }
+        docs.add(client().prepareIndex("fail").setSource("foo", 0));
+        indexRandom(true, docs);
+
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> run("FROM fail,ok | LIMIT 100").close());
+        assertThat(e.getMessage(), equalTo("test failure"));
+    }
+
+    public static class FailingFieldPlugin extends Plugin implements ScriptPlugin {
+
+        @Override
+        public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
+            return new ScriptEngine() {
+                @Override
+                public String getType() {
+                    return "fail";
+                }
+
+                @Override
+                @SuppressWarnings("unchecked")
+                public <FactoryType> FactoryType compile(
+                    String name,
+                    String code,
+                    ScriptContext<FactoryType> context,
+                    Map<String, String> params
+                ) {
+                    return (FactoryType) new LongFieldScript.Factory() {
+                        @Override
+                        public LongFieldScript.LeafFactory newFactory(
+                            String fieldName,
+                            Map<String, Object> params,
+                            SearchLookup searchLookup,
+                            OnScriptError onScriptError
+                        ) {
+                            return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) {
+                                @Override
+                                public void execute() {
+                                    throw new ElasticsearchException("test failure");
+                                }
+                            };
+                        }
+                    };
+                }
+
+                @Override
+                public Set<ScriptContext<?>> getSupportedContexts() {
+                    return Set.of(LongFieldScript.CONTEXT);
+                }
+            };
+        }
+    }
+}

From abcdbf27b249553fa64527ac6f7128a782638eb3 Mon Sep 17 00:00:00 2001
From: Ankita Kumar
Date: Wed, 20 Nov 2024 16:34:25 -0500
Subject: [PATCH 108/386] Metrics for incremental bulk splits (#116765)

Add metrics to track incremental bulk request splits due to indexing
pressure.

Resolves ES-9612
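As a condensed sketch of what is being instrumented: a bulk request is split only when current memory use crosses a watermark and the incoming request is itself large enough, and this patch gives each branch its own counter. The class below is a simplified stand-in for `IndexingPressure#shouldSplitBulk` (whose real diff appears later in this patch); the watermark values mirror the test settings, not production defaults.

import java.util.concurrent.atomic.AtomicLong;

// Simplified stand-in for the watermark split logic; field names mirror the patch.
class SplitBulkSketch {
    final AtomicLong lowWaterMarkSplits = new AtomicLong();
    final AtomicLong highWaterMarkSplits = new AtomicLong();
    final long lowWatermark = 512;        // SPLIT_BULK_LOW_WATERMARK ("512B")
    final long lowWatermarkSize = 2048;   // SPLIT_BULK_LOW_WATERMARK_SIZE ("2048B")
    final long highWatermark = 4096;      // SPLIT_BULK_HIGH_WATERMARK ("4KB")
    final long highWatermarkSize = 1024;  // SPLIT_BULK_HIGH_WATERMARK_SIZE ("1024B")

    boolean shouldSplitBulk(long currentUsage, long size) {
        if (currentUsage >= highWatermark && size >= highWatermarkSize) {
            highWaterMarkSplits.incrementAndGet(); // counter added by this patch
            return true;
        }
        if (currentUsage >= lowWatermark && size >= lowWatermarkSize) {
            lowWaterMarkSplits.incrementAndGet();  // counter added by this patch
            return true;
        }
        return false;
    }
}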
---
 docs/changelog/116765.yaml                    |   5 +
 .../action/bulk/IncrementalBulkIT.java        |  12 +-
 .../metrics/NodeIndexingMetricsIT.java        | 261 ++++++++++++++++++
 .../org/elasticsearch/TransportVersions.java  |   1 +
 .../cluster/stats/ClusterStatsNodes.java      |   8 +-
 .../elasticsearch/index/IndexingPressure.java |  20 +-
 .../index/stats/IndexingPressureStats.java    |  34 ++-
 .../monitor/metrics/NodeMetrics.java          |  28 ++
 .../cluster/node/stats/NodeStatsTests.java    |   2 +
 .../index/IndexingPressureTests.java          |  25 ++
 10 files changed, 390 insertions(+), 6 deletions(-)
 create mode 100644 docs/changelog/116765.yaml

diff --git a/docs/changelog/116765.yaml b/docs/changelog/116765.yaml
new file mode 100644
index 0000000000000..ec2357c17acaf
--- /dev/null
+++ b/docs/changelog/116765.yaml
@@ -0,0 +1,5 @@
+pr: 116765
+summary: Metrics for incremental bulk splits
+area: Distributed
+type: enhancement
+issues: []

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java
index 4977d87d5a348..deae022795ad2 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java
@@ -65,7 +65,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
             .put(super.nodeSettings(nodeOrdinal, otherSettings))
             .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B")
             .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B")
-            .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB")
+            .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB")
             .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B")
             .build();
     }
@@ -161,6 +161,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception {
         IndexRequest indexRequest = indexRequest(index);
         long total = indexRequest.ramBytesUsed();
+        long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits();
+        long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits();
         while (total < 2048) {
             refCounted.incRef();
             handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true));
@@ -175,6 +177,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception {
         handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true));

         assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L)));
+        assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits + 1)));
+        assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits));

         PlainActionFuture<BulkResponse> future = new PlainActionFuture<>();
         handler.lastItems(List.of(indexRequest), refCounted::decRef, future);
@@ -192,6 +196,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception {
         IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName);
         IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName);
         ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName);
+        long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits();
+        long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits();
AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); AtomicBoolean nextPage = new AtomicBoolean(false); @@ -217,6 +223,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); assertTrue(nextPage.get()); nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); ArrayList> requestsThrottle = new ArrayList<>(); // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled @@ -235,6 +243,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { // Wait until we are ready for the next page assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); for (IncrementalBulkService.Handler h : handlers) { refCounted.incRef(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 9364e7437141e..e4d44212f2854 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -9,35 +9,48 @@ package org.elasticsearch.monitor.metrics; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.IncrementalBulkService; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static org.elasticsearch.index.IndexingPressure.MAX_COORDINATING_BYTES; import static org.elasticsearch.index.IndexingPressure.MAX_PRIMARY_BYTES; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import 
static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { @@ -453,6 +466,211 @@ public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exce } } + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkLowWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + IndexRequest indexRequest = indexRequest(index); + long total = indexRequest.ramBytesUsed(); + while (total < 2048) { + refCounted.incRef(); + handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + indexRequest = indexRequest(index); + total += indexRequest.ramBytesUsed(); + } + + assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), greaterThan(0L)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + refCounted.incRef(); + handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + PlainActionFuture future = new PlainActionFuture<>(); + 
handler.lastItems(List.of(indexRequest), refCounted::decRef, future); + + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + assertFalse(refCounted.hasReferences()); + } + + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkHighWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + ArrayList handlers = new ArrayList<>(); + for (int i = 0; i < 4; ++i) { + ArrayList> requests = new ArrayList<>(); + add512BRequests(requests, index); + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + handlers.add(handler); + refCounted.incRef(); + handler.addItems(requests, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + } + + // Test that a request smaller than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is not throttled + ArrayList> requestsNoThrottle = new ArrayList<>(); + add512BRequests(requestsNoThrottle, index); + IncrementalBulkService.Handler handlerNoThrottle = incrementalBulkService.newBulkRequest(); + handlers.add(handlerNoThrottle); + refCounted.incRef(); + handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + ArrayList> requestsThrottle = new ArrayList<>(); + // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled + add512BRequests(requestsThrottle, index); + add512BRequests(requestsThrottle, index); + + CountDownLatch finishLatch = new CountDownLatch(1); + blockWritePool(threadPool, finishLatch); + IncrementalBulkService.Handler handlerThrottled = incrementalBulkService.newBulkRequest(); + refCounted.incRef(); + handlerThrottled.addItems(requestsThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertFalse(nextPage.get()); + finishLatch.countDown(); + + handlers.add(handlerThrottled); + + // Wait until we are ready for the next page + 
assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + + for (IncrementalBulkService.Handler h : handlers) { + refCounted.incRef(); + PlainActionFuture future = new PlainActionFuture<>(); + h.lastItems(List.of(indexRequest(index)), refCounted::decRef, future); + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + } + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + refCounted.decRef(); + assertFalse(refCounted.hasReferences()); + testTelemetryPlugin.collect(); + } + private static Measurement getSingleRecordedMetric(Function> metricGetter, String name) { final List measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); @@ -470,4 +688,47 @@ private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return Math.abs(expected - actual) < eps; } + + private static IndexRequest indexRequest(String index) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(index); + indexRequest.source(Map.of("field", randomAlphaOfLength(10))); + return indexRequest; + } + + private static void add512BRequests(ArrayList> requests, String index) { + long total = 0; + while (total < 512) { + IndexRequest indexRequest = indexRequest(index); + requests.add(indexRequest); + total += indexRequest.ramBytesUsed(); + } + assertThat(total, lessThan(1024L)); + } + + private static void blockWritePool(ThreadPool threadPool, CountDownLatch finishLatch) { + final var threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); + final var startBarrier = new CyclicBarrier(threadCount + 1); + final var blockingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + safeAwait(startBarrier); + safeAwait(finishLatch); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + for (int i = 0; i < threadCount; i++) { + threadPool.executor(ThreadPool.Names.WRITE).execute(blockingTask); + } + safeAwait(startBarrier); + } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 887cfea36a199..95fffb1fe8224 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -204,6 +204,7 @@ static TransportVersion def(int id) { public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c1f867c247345..5c4be62723e07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -793,6 +793,8 @@ static class IndexPressureStats implements ToXContentFragment { long currentCoordinatingOps = 0; long currentPrimaryOps = 0; long currentReplicaOps = 0; + long lowWaterMarkSplits = 0; + long highWaterMarkSplits = 0; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats nodeStatIndexingPressureStats = nodeStat.getIndexingPressureStats(); if (nodeStatIndexingPressureStats != null) { @@ -816,6 +818,8 @@ static class IndexPressureStats implements ToXContentFragment { currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests(); + lowWaterMarkSplits += nodeStatIndexingPressureStats.getLowWaterMarkSplits(); + highWaterMarkSplits += nodeStatIndexingPressureStats.getHighWaterMarkSplits(); } } indexingPressureStats = new IndexingPressureStats( @@ -838,7 +842,9 @@ static class IndexPressureStats implements ToXContentFragment { currentPrimaryOps, currentReplicaOps, primaryDocumentRejections, - totalCoordinatingRequests + totalCoordinatingRequests, + lowWaterMarkSplits, + highWaterMarkSplits ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index f80e8a89f5cf2..43ae38fea6018 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -105,6 +105,9 @@ public class IndexingPressure { private final AtomicLong replicaRejections = new AtomicLong(0); private final AtomicLong primaryDocumentRejections = new AtomicLong(0); + private final AtomicLong lowWaterMarkSplits = new AtomicLong(0); + private final AtomicLong highWaterMarkSplits = new AtomicLong(0); + private final long lowWatermark; private final long lowWatermarkSize; private final long highWatermark; @@ -265,11 +268,20 @@ public Releasable markReplicaOperationStarted(int operations, long bytes, boolea public boolean shouldSplitBulk(long size) { long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); - return (currentUsage >= lowWatermark && size >= lowWatermarkSize) || (currentUsage >= highWatermark && size >= highWatermarkSize); + if (currentUsage >= highWatermark && size >= highWatermarkSize) { + highWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to high watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + if (currentUsage >= lowWatermark && size >= lowWatermarkSize) { + lowWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to low watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + return (false); } public IndexingPressureStats stats() { - // TODO: Update stats with new primary/replica/coordinating limits and add throttling stats return new IndexingPressureStats( totalCombinedCoordinatingAndPrimaryBytes.get(), 
totalCoordinatingBytes.get(), @@ -290,7 +302,9 @@ public IndexingPressureStats stats() { currentPrimaryOps.get(), currentReplicaOps.get(), primaryDocumentRejections.get(), - totalCoordinatingRequests.get() + totalCoordinatingRequests.get(), + lowWaterMarkSplits.get(), + highWaterMarkSplits.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index b5197274dd519..0a56db56b2c95 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -36,6 +36,12 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long primaryDocumentRejections; private final long memoryLimit; + /* Count the number of splits due to SPLIT_BULK_LOW_WATERMARK and SPLIT_BULK_HIGH_WATERMARK. These two stats are not yet serialized via XContent. */ + private final long lowWaterMarkSplits; + private final long highWaterMarkSplits; + // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; private final long totalCoordinatingRequests; @@ -85,6 +91,14 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { totalCoordinatingRequests = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + lowWaterMarkSplits = in.readVLong(); + highWaterMarkSplits = in.readVLong(); + } else { + lowWaterMarkSplits = -1L; + highWaterMarkSplits = -1L; + } } public IndexingPressureStats( @@ -107,7 +121,9 @@ public IndexingPressureStats( long currentPrimaryOps, long currentReplicaOps, long primaryDocumentRejections, - long totalCoordinatingRequests + long totalCoordinatingRequests, + long lowWaterMarkSplits, + long highWaterMarkSplits ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -131,6 +147,9 @@ public IndexingPressureStats( this.primaryDocumentRejections = primaryDocumentRejections; this.totalCoordinatingRequests = totalCoordinatingRequests; + + this.lowWaterMarkSplits = lowWaterMarkSplits; + this.highWaterMarkSplits = highWaterMarkSplits; } @Override @@ -160,6 +179,11 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(totalCoordinatingRequests); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + out.writeVLong(lowWaterMarkSplits); + out.writeVLong(highWaterMarkSplits); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -242,6 +266,14 @@ public long getTotalCoordinatingRequests() { return totalCoordinatingRequests; } + public long getHighWaterMarkSplits() { + return highWaterMarkSplits; + } + + public long getLowWaterMarkSplits() { + return lowWaterMarkSplits; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index acc26a42e4745..94395193622e0 100644 ---
a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -643,6 +643,34 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.low_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_LOW_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getLowWaterMarkSplits) + .orElse(0L) + ) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.high_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_HIGH_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getHighWaterMarkSplits) + .orElse(0L) + ) + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.flush.total.time", diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 7a31f0dcb4631..a7058e5d6cd8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1057,6 +1057,8 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index b4130120372a1..8da7ada91856d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -37,6 +37,31 @@ public void testMemoryLimitSettingsFallbackToOldSingleLimitSetting() { assertThat(IndexingPressure.MAX_REPLICA_BYTES.get(settings), Matchers.equalTo(ByteSizeValue.ofKb(30))); } + public void testHighAndLowWatermarkSplits() { + IndexingPressure indexingPressure = new IndexingPressure(settings); + + try ( + Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); + Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) + ) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + + try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 
1L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + } + } + } + public void testHighAndLowWatermarkSettings() { IndexingPressure indexingPressure = new IndexingPressure(settings); From e68f31754c0bbe2c69b933eaf950e5f9462dedea Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 20 Nov 2024 16:41:55 -0500 Subject: [PATCH 109/386] Adds `maxSim` functions for multi_dense_vector fields (#116993) This adds `maxSim` functions, specifically dotProduct and InvHamming. Why these two, you might ask? Well, they are the best approximations of what's possible with Col* late interaction type models. Effectively, you want a similarity metric where "greater == better". Regular `hamming` isn't exactly that, but inverting it (just like our `element_type: bit` index for dense_vectors) is a nice approximation with bit vectors and multi-vector scoring. Then, of course, dotProduct is another usage. We will allow dot-product between like elements (bytes -> bytes, floats -> floats) and of course, allow `floats -> bit`, where the stored `bit` elements are applied as a "mask" over the float queries. This allows for some nice asymmetric interactions. This is all behind a feature flag, and I need to write a mountain of docs in a separate PR. --- .../org.elasticsearch.script.score.txt | 2 + .../141_multi_dense_vector_max_sim.yml | 206 ++++++++++ .../action/search/SearchCapabilities.java | 3 + .../script/MultiVectorScoreScriptUtils.java | 372 ++++++++++++++++++ .../field/vectors/BitMultiDenseVector.java | 70 +++- .../field/vectors/ByteMultiDenseVector.java | 54 ++- .../ByteMultiDenseVectorDocValuesField.java | 14 +- .../field/vectors/FloatMultiDenseVector.java | 38 +- .../FloatMultiDenseVectorDocValuesField.java | 15 +- .../field/vectors/MultiDenseVector.java | 21 + .../script/field/vectors/VectorIterator.java | 70 ++++ .../MultiVectorScoreScriptUtilsTests.java | 342 ++++++++++++++++ .../field/vectors/MultiDenseVectorTests.java | 83 ++++ 13 files changed, 1274 insertions(+), 16 deletions(-) create mode 100644 modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml create mode 100644 server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java create mode 100644 server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java create mode 100644 server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java create mode 100644 server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt index e76db7cfb1d26..5a1d8c002aa17 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt @@ -50,5 +50,7 @@ static_import { double cosineSimilarity(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity double dotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct double
hamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$Hamming + double maxSimDotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimDotProduct + double maxSimInvHamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimInvHamming } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml new file mode 100644 index 0000000000000..caa7c59ab4c42 --- /dev/null +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml @@ -0,0 +1,206 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ multi_dense_vector_script_max_sim ] + test_runner_features: capabilities + reason: "Support for multi dense vector max-sim functions capability required" + - skip: + features: headers + + - do: + indices.create: + index: test-index + body: + settings: + number_of_shards: 1 + mappings: + properties: + vector: + type: multi_dense_vector + dims: 5 + byte_vector: + type: multi_dense_vector + dims: 5 + element_type: byte + bit_vector: + type: multi_dense_vector + dims: 40 + element_type: bit + - do: + index: + index: test-index + id: "1" + body: + vector: [[230.0, 300.33, -34.8988, 15.555, -200.0], [-0.5, 100.0, -13, 14.8, -156.0]] + byte_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + bit_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + + - do: + index: + index: test-index + id: "3" + body: + vector: [[0.5, 111.3, -13.0, 14.8, -156.0]] + byte_vector: [[2, 18, -5, 0, -124]] + bit_vector: [[2, 18, -5, 0, -124]] + + - do: + indices.refresh: {} +--- +"Test max-sim dot product scoring": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 611.316, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 68.90001, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 230, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 33, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 3, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 2, error: 0.01}} + +# doing max-sim dot product with a 
vector where the stored bit vectors are used as masks + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]] + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 190, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 125, error: 0.01}} +--- +"Test max-sim inv hamming scoring": + - skip: + features: close_to + + # inv hamming doesn't apply to float vectors + - do: + catch: bad_request + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 241f30b367782..e5c4826bfce97 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -40,6 +40,8 @@ private SearchCapabilities() {} private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support"; /** Support multi-dense-vector script field access. */ private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access"; + /** Initial support for multi-dense-vector maxSim functions access. 
*/ + private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim"; private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; @@ -56,6 +58,7 @@ private SearchCapabilities() {} if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) { capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER); capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS); + capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM); } if (Build.current().isSnapshot()) { capabilities.add(KQL_QUERY_SUPPORTED); diff --git a/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java new file mode 100644 index 0000000000000..136c5e7b57d4b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java @@ -0,0 +1,372 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.script.field.vectors.DenseVector; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; + +import java.io.IOException; +import java.util.HexFormat; +import java.util.List; + +public class MultiVectorScoreScriptUtils { + + public static class MultiDenseVectorFunction { + protected final ScoreScript scoreScript; + protected final MultiDenseVectorDocValuesField field; + + public MultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field) { + this.scoreScript = scoreScript; + this.field = field; + } + + void setNextVector() { + try { + field.setNextDocId(scoreScript._getDocId()); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } + if (field.isEmpty()) { + throw new IllegalArgumentException("A document doesn't have a value for a multi-vector field!"); + } + } + } + + public static class ByteMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final byte[][] queryVector; + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + field.getElementType().checkDimensions(field.get().getDims(), queryVector.get(0).size()); + this.queryVector = new byte[queryVector.size()][queryVector.get(0).size()]; + float[] validateValues = new float[queryVector.size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + final Number number = queryVector.get(i).get(j); + byte value = number.byteValue(); + this.queryVector[i][j] = value; + validateValues[i] = number.floatValue(); + } + field.getElementType().checkVectorBounds(validateValues); + } + } + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + this.queryVector = queryVector; + } + } + + public static class FloatMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final float[][] queryVector; + + /** + * Constructs a dense vector function used for float vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public FloatMultiDenseVectorFunction( + ScoreScript scoreScript, + MultiDenseVectorDocValuesField field, + List> queryVector + ) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + DenseVector.checkDimensions(field.get().getDims(), queryVector.get(0).size()); + + this.queryVector = new float[queryVector.size()][queryVector.get(0).size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + this.queryVector[i][j] = queryVector.get(i).get(j).floatValue(); + } + field.getElementType().checkVectorBounds(this.queryVector[i]); + } + } + } + + // Calculate Hamming distances between a query's dense vector and documents' dense vectors + public interface MaxSimInvHammingDistanceInterface { + float maxSimInvHamming(); + } + + public static class ByteMaxSimInvHammingDistance extends ByteMultiDenseVectorFunction implements MaxSimInvHammingDistanceInterface { + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public float maxSimInvHamming() { + setNextVector(); + return field.get().maxSimInvHamming(queryVector); + } + } + + private record BytesOrList(byte[][] bytes, List> list) {} + + @SuppressWarnings("unchecked") + private static BytesOrList parseBytes(Object queryVector) { + if (queryVector instanceof List) { + // check if it's a list of strings or a list of lists + if (((List) queryVector).get(0) instanceof List) { + return new BytesOrList(null, ((List>) queryVector)); + } else if (((List) queryVector).get(0) instanceof String) { + byte[][] parsedQueryVector = new byte[((List) queryVector).size()][]; + int lastSize = -1; + for (int i = 0; i < ((List) queryVector).size(); i++) { + parsedQueryVector[i] = HexFormat.of().parseHex((String) ((List) queryVector).get(i)); + if (lastSize != -1 && lastSize != parsedQueryVector[i].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions."
+ ); + } + lastSize = parsedQueryVector[i].length; + } + return new BytesOrList(parsedQueryVector, null); + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } + + public static final class MaxSimInvHamming { + + private final MaxSimInvHammingDistanceInterface function; + + public MaxSimInvHamming(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + if (field.getElementType() == DenseVectorFieldMapper.ElementType.FLOAT) { + throw new IllegalArgumentException("hamming distance is only supported for byte or bit vectors"); + } + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.bytes); + } else { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.list); + } + } + + public double maxSimInvHamming() { + return function.maxSimInvHamming(); + } + } + + // Calculate a dot product between a query's dense vector and documents' dense vectors + public interface MaxSimDotProductInterface { + double maxSimDotProduct(); + } + + public static class MaxSimBitDotProduct extends MultiDenseVectorFunction implements MaxSimDotProductInterface { + private final byte[][] byteQueryVector; + private final float[][] floatQueryVector; + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("Cannot calculate bit dot product for non-bit vectors"); + } + int fieldDims = field.get().getDims(); + if (fieldDims != queryVector.length * Byte.SIZE && fieldDims != queryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. Must be [" + fieldDims / 8 + "] for bitwise operations, or [" + fieldDims + "] for byte wise operations: provided [" + queryVector.length + "]." + ); + } + this.byteQueryVector = queryVector; + this.floatQueryVector = null; + } + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("Cannot calculate bit dot product for non-bit vectors"); + } + float[][] floatQueryVector = new float[queryVector.size()][]; + byte[][] byteQueryVector = new byte[queryVector.size()][]; + boolean isFloat = false; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions."
+ ); + } + lastSize = queryVector.get(i).size(); + floatQueryVector[i] = new float[queryVector.get(i).size()]; + if (isFloat == false) { + byteQueryVector[i] = new byte[queryVector.get(i).size()]; + } + for (int j = 0; j < queryVector.get(i).size(); j++) { + Number number = queryVector.get(i).get(j); + floatQueryVector[i][j] = number.floatValue(); + if (isFloat == false) { + byteQueryVector[i][j] = number.byteValue(); + } + if (isFloat + || floatQueryVector[i][j] % 1.0f != 0.0f + || floatQueryVector[i][j] < Byte.MIN_VALUE + || floatQueryVector[i][j] > Byte.MAX_VALUE) { + isFloat = true; + } + } + } + int fieldDims = field.get().getDims(); + if (isFloat) { + this.floatQueryVector = floatQueryVector; + this.byteQueryVector = null; + if (fieldDims != floatQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims + + "] for float wise operations: provided [" + + floatQueryVector[0].length + + "]." + ); + } + } else { + this.floatQueryVector = null; + this.byteQueryVector = byteQueryVector; + if (fieldDims != byteQueryVector[0].length * Byte.SIZE && fieldDims != byteQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + byteQueryVector[0].length + + "]." + ); + } + } + } + + @Override + public double maxSimDotProduct() { + setNextVector(); + return byteQueryVector != null ? field.get().maxSimDotProduct(byteQueryVector) : field.get().maxSimDotProduct(floatQueryVector); + } + } + + public static class MaxSimByteDotProduct extends ByteMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static class MaxSimFloatDotProduct extends FloatMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimFloatDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static final class MaxSimDotProduct { + + private final MaxSimDotProductInterface function; + + @SuppressWarnings("unchecked") + public MaxSimDotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + function = switch (field.getElementType()) { + case BIT -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.list); + } + } + case BYTE -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimByteDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new 
MaxSimByteDotProduct(scoreScript, field, bytesOrList.list); + } + } + case FLOAT -> { + if (queryVector instanceof List) { + yield new MaxSimFloatDotProduct(scoreScript, field, (List>) queryVector); + } + throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName()); + } + }; + } + + public double maxSimDotProduct() { + return function.maxSimDotProduct(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java index 24e19a803ff38..7805816090d51 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java @@ -10,11 +10,13 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.simdvec.ESVectorUtil; -import java.util.Iterator; +import java.util.Arrays; public class BitMultiDenseVector extends ByteMultiDenseVector { - public BitMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public BitMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { super(vectorValues, magnitudesBytes, numVecs, dims); } @@ -31,6 +33,70 @@ public void checkDimensions(int qvDims) { } } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipFloatBit(query[i], vv)); + } + } + float sums = 0; + for (float m : maxes) { + sums += m; + } + return sums; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + if (query[0].length == dims) { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.andBitCount(query[i], vv)); + } + } + } else { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipByteBit(query[i], vv)); + } + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = this.getDims(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + @Override public int getDims() { return dims * Byte.SIZE; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java index e610d10146b2f..5e9d3e05746c8 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java +++ 
b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java @@ -10,21 +10,22 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder; +import java.util.Arrays; import java.util.Iterator; public class ByteMultiDenseVector implements MultiDenseVector { - protected final Iterator vectorValues; + protected final VectorIterator vectorValues; protected final int numVecs; protected final int dims; - private Iterator floatDocVectors; private float[] magnitudes; private final BytesRef magnitudesBytes; - public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public ByteMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { assert magnitudesBytes.length == numVecs * Float.BYTES; this.vectorValues = vectorValues; this.numVecs = numVecs; @@ -33,11 +34,50 @@ public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBy } @Override - public Iterator getVectors() { - if (floatDocVectors == null) { - floatDocVectors = new ByteToFloatIteratorWrapper(vectorValues, dims); + public float maxSimDotProduct(float[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = dims * Byte.SIZE; + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; } - return floatDocVectors; + return sum; + } + + @Override + public Iterator getVectors() { + return new ByteToFloatIteratorWrapper(vectorValues.copy(), dims); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java index d1e062e0a3dee..d45c5b85137f5 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java @@ -23,7 +23,7 @@ public class ByteMultiDenseVectorDocValuesField extends MultiDenseVectorDocValue private final BinaryDocValues magnitudes; protected final int dims; protected int numVecs; - protected Iterator vectorValue; + protected VectorIterator vectorValue; protected boolean decoded; protected BytesRef value; protected BytesRef magnitudesValue; @@ -111,7 +111,7 @@ public boolean isEmpty() { return value == null; } - static class ByteVectorIterator implements Iterator { + static class ByteVectorIterator implements VectorIterator { private final byte[] buffer; 
private final BytesRef vectorValues; private final int size; @@ -138,5 +138,15 @@ public byte[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new ByteVectorIterator(vectorValues, new byte[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java index 9ffe8b3b970c4..9c2f7eb6a86d4 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java @@ -10,7 +10,9 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import java.util.Arrays; import java.util.Iterator; import static org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder.getMultiMagnitudes; @@ -21,19 +23,47 @@ public class FloatMultiDenseVector implements MultiDenseVector { private float[] magnitudesArray = null; private final int dims; private final int numVectors; - private final Iterator decodedDocVector; + private final VectorIterator vectorValues; - public FloatMultiDenseVector(Iterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { + public FloatMultiDenseVector(VectorIterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { assert magnitudes.length == numVectors * Float.BYTES; - this.decodedDocVector = decodedDocVector; + this.vectorValues = decodedDocVector; this.magnitudes = magnitudes; this.numVectors = numVectors; this.dims = dims; } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + float[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new UnsupportedOperationException("hamming distance is not supported for float vectors"); + } + @Override public Iterator getVectors() { - return decodedDocVector; + return vectorValues.copy(); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java index 356db58d989c5..c7ac7842afd96 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java @@ -110,14 +110,16 @@ private void decodeVectorIfNecessary() { } } - static class FloatVectorIterator implements Iterator { + static class FloatVectorIterator implements VectorIterator { private final float[] buffer; private final FloatBuffer vectorValues; + private final BytesRef vectorValueBytesRef; private final int size; private int idx = 0; FloatVectorIterator(BytesRef vectorValues, float[] buffer, int size) { assert vectorValues.length == 
(buffer.length * Float.BYTES * size); + this.vectorValueBytesRef = vectorValues; this.vectorValues = ByteBuffer.wrap(vectorValues.bytes, vectorValues.offset, vectorValues.length) .order(ByteOrder.LITTLE_ENDIAN) .asFloatBuffer(); @@ -139,5 +141,16 @@ public float[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new FloatVectorIterator(vectorValueBytesRef, new float[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + vectorValues.rewind(); + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java index 85c851dbe545c..7d948cf5a74fa 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java @@ -17,6 +17,12 @@ default void checkDimensions(int qvDims) { checkDimensions(getDims(), qvDims); } + float maxSimDotProduct(float[][] query); + + float maxSimDotProduct(byte[][] query); + + float maxSimInvHamming(byte[][] query); + Iterator getVectors(); float[] getMagnitudes(); @@ -63,6 +69,21 @@ public int getDims() { throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); } + @Override + public float maxSimDotProduct(float[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + @Override public int size() { return 0; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java new file mode 100644 index 0000000000000..b8615ac877254 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.script.field.vectors; + +import java.util.Iterator; + +public interface VectorIterator extends Iterator { + Iterator copy(); + + void reset(); + + static VectorIterator from(float[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public float[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } + + static VectorIterator from(byte[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public byte[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } +} diff --git a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java new file mode 100644 index 0000000000000..c4a1699181efc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java @@ -0,0 +1,342 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script; + +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimDotProduct; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimInvHamming; +import org.elasticsearch.script.field.vectors.BitMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HexFormat; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MultiVectorScoreScriptUtilsTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testFloatMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][][] docVectors = new float[][][] { + { { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }, { 100.0f, 200.0f, -50.0f, 10.0f, -150.0f } } }; + float[][] docMagnitudes = new float[][] { { 0.0f, 0.0f } }; + for (int i = 0; i < docVectors.length; i++) { + for (int j = 0; j < 
docVectors[i].length; j++) { + docMagnitudes[i][j] = (float) Math.sqrt(VectorUtil.dotProduct(docVectors[i][j], docVectors[i][j])); + } + } + + List> queryVector = List.of(Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f)); + List> invalidQueryVector = List.of(Arrays.asList(0.5, 111.3)); + + List fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ), + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + // Test max similarity dot product + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + float maxSimDotProductExpected = 65425.625f; // best dot product over the doc's vectors, summed across query vectors + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + maxSimDotProductExpected, + maxSimDotProduct.maxSimDotProduct(), + 0.001 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MultiVectorScoreScriptUtils.MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat(e.getMessage(), containsString("hamming distance is only supported for byte or bit vectors")); + + // Check scripting infrastructure integration + assertEquals(65425.6249, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct() + ); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testByteMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][] docVector = new float[][] { { 1, 127, -128, 5, -10 } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List> queryVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + List> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 1, 125, -12, 2, 4 })); + + List fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "test", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> 
field); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + + // Check scripting infrastructure integration + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName).maxSimDotProduct(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, queryVector, fieldName).maxSimInvHamming(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, hexidecimalString, fieldName).maxSimInvHamming(), 0.001); + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows(IllegalArgumentException.class, maxSimDotProduct::maxSimDotProduct); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testBitMultiVectorClassBindingsDotProduct() throws IOException { + String fieldName = "vector"; + int dims = 8; + float[][] docVector = new float[][] { { 124 } }; + // 124 in binary is b01111100 + List> queryVector = List.of( + Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4, (byte) 1, (byte) 125, (byte) -12) + ); + List> floatQueryVector = List.of(Arrays.asList(1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f)); + List> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 124 })); + + List fields = List.of( + new BitMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BIT), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 5 } }), + "test", + ElementType.BIT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + MaxSimDotProduct function = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + -12 + 2 + 4 + 1 + 125, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, floatQueryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + 0.42f + 0f + 1f - 1f - 0.42f, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + Integer.bitCount(124), + function.maxSimDotProduct(), + 0.0 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString( + "query 
vector contains inner vectors which have incorrect number of dimensions. " + + "Must be [1] for bitwise operations, or [8] for byte wise operations: provided [2]." + ) + ); + } + } + + public void testByteVsFloatSimilarity() throws IOException { + int dims = 5; + float[][] docVector = new float[][] { { 1f, 127f, -128f, 5f, -10f } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List> listFloatVector = List.of(Arrays.asList(1f, 125f, -12f, 2f, 4f)); + List> listByteVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + float[][] floatVector = new float[][] { { 1f, 125f, -12f, 2f, 4f } }; + byte[][] byteVector = new byte[][] { { (byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4 } }; + + List fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field1", + ElementType.FLOAT, + dims + ), + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field3", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + int dotProductExpected = 17382; + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, listFloatVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + maxSimDotProduct = new MaxSimDotProduct(scoreScript, listByteVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + switch (field.getElementType()) { + case BYTE -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(byteVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(floatVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(byte[][] queryVector)] instead")); + } + case FLOAT -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(floatVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(byteVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(float[][] queryVector)] instead")); + } + } + } + } + + public void testByteBoundaries() throws IOException { + String fieldName = "vector"; + int dims = 1; + float[] docVector = new float[] { 0 }; + List> greaterThanVector = List.of(List.of(128)); + List> lessThanVector = List.of(List.of(-129)); + List> decimalVector = List.of(List.of(0.5)); + + List fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { { docVector } }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 1 } }), + "test", + ElementType.BYTE, + dims + ) + ); + + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + 
when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + IllegalArgumentException e; + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, greaterThanVector, fieldName)); + assertEquals( + "element_type [byte] vectors only support integers between [-128, 127] but found [128.0] at dim [0]; " + + "Preview of invalid vector: [128.0]", + e.getMessage() + ); + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, lessThanVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support integers between [-128, 127] but found [-129.0] at dim [0]; " + + "Preview of invalid vector: [-129.0]" + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, decimalVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support non-decimal values but found decimal value [0.5] at dim [0]; " + + "Preview of invalid vector: [0.5]" + ); + } + } + + public void testDimMismatch() throws IOException { + + } +} diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java new file mode 100644 index 0000000000000..12f4b931b4d0a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.function.IntFunction; + +public class MultiDenseVectorTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testByteUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + byte[][] docVector = new byte[count][dims]; + float[][] queryVector = new float[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(docVector[i]); + for (int j = 0; j < dims; j++) { + queryVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newByteVector(docVector); + UnsupportedOperationException e; + + e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); + assertEquals(e.getMessage(), "use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + public void testFloatUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + float[][] docVector = new float[count][dims]; + byte[][] queryVector = new byte[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(queryVector[i]); + for (int j = 0; j < dims; j++) { + docVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newFloatVector(docVector); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); + assertEquals(e.getMessage(), "use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + static MultiDenseVector newFloatVector(float[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new FloatMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static MultiDenseVector newByteVector(byte[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new ByteMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static BytesRef magnitudes(int count, IntFunction magnitude) { + ByteBuffer magnitudeBuffer = ByteBuffer.allocate(count * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < count; i++) { + magnitudeBuffer.putFloat(magnitude.apply(i)); + } + return new BytesRef(magnitudeBuffer.array()); + } +} From 53e5fab1d7ecc701a635188b745dd45b7ff4e579 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 20 Nov 2024 16:50:10 -0500 Subject: [PATCH 110/386] ESQL: Skip more union type tests (#117186) (#117197) Skip some more union type tests when running against older versions of Elasticsearch because they *now* require `date_nanos` support. 
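For context on the skipping mechanism: each csv-spec test declares the
capabilities it needs via `required_capability` lines, and the runner skips
the test when the cluster under test does not advertise all of them. A
minimal sketch of the convention (the test name, query, and result row are
illustrative, not taken from this patch):

    exampleDateNanosTest
    required_capability: union_types
    required_capability: to_date_nanos

    FROM sample_data, sample_data_ts_nanos
    | STATS count=count(*) BY @timestamp::datetime
    ;

    count:long | @timestamp:date
    14         | 2023-10-23T12:00:00.000Z
    ;

Adding `required_capability: to_date_nanos` below therefore turns hard
failures against pre-`date_nanos` clusters into skips.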
Closes #117108 --- .../src/main/resources/union_types.csv-spec | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index ad6dd4fecc3f7..af987b13acc82 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -718,6 +718,7 @@ count:long | @timestamp:date multiIndexTsNanosToDatetimeStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATETIME(@timestamp)) @@ -754,6 +755,7 @@ multiIndexTsLongStatsDrop required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -774,6 +776,7 @@ multiIndexTsLongStatsInline2 required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -917,6 +920,7 @@ multiIndexIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -958,6 +962,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -1000,6 +1005,7 @@ multiIndexIpStringTsLongRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1041,6 +1047,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1083,6 +1090,7 @@ multiIndexIpStringTsLongRenameToString required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_STRING(TO_DATETIME(@timestamp)), host_ip = TO_STRING(TO_IP(client_ip)) @@ -1125,6 +1133,7 @@ multiIndexWhereIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1141,6 +1150,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: 
to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1157,6 +1167,7 @@ multiIndexWhereIpStringLikeTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1173,6 +1184,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringLikeTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1189,6 +1201,7 @@ multiIndexMultiColumnTypesRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1207,6 +1220,7 @@ multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1226,6 +1240,7 @@ multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 From 3a1bc05ad0ff7651d08c978c37c26c007f87d3ff Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 20 Nov 2024 14:14:19 -0800 Subject: [PATCH 111/386] Change synthetic source logic for constant_keyword (#117182) * Change synthetic source logic for constant_keyword * Update docs/changelog/117182.yaml --- docs/changelog/117182.yaml | 6 +++ .../mapper/ConstantKeywordFieldMapper.java | 44 ++++--------------- .../ConstantKeywordFieldMapperTests.java | 11 +++++ 3 files changed, 25 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/117182.yaml diff --git a/docs/changelog/117182.yaml b/docs/changelog/117182.yaml new file mode 100644 index 0000000000000..b5398bec1ef30 --- /dev/null +++ b/docs/changelog/117182.yaml @@ -0,0 +1,6 @@ +pr: 117182 +summary: Change synthetic source logic for `constant_keyword` +area: Mapping +type: bug +issues: + - 117083 diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 216f82552353b..fa5d9428bb0c6 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.constantkeyword.mapper; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -58,7 +57,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.stream.Stream; /** * A {@link FieldMapper} 
that assigns every document the same value. @@ -356,40 +354,14 @@ protected SyntheticSourceSupport syntheticSourceSupport() { return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); } - var loader = new SourceLoader.SyntheticFieldLoader() { - @Override - public Stream> storedFieldLoaders() { - return Stream.of(); - } - - @Override - public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) { - return docId -> true; - } - - @Override - public boolean hasValue() { - return true; - } - - @Override - public void write(XContentBuilder b) throws IOException { - if (fieldType().value != null) { - b.field(leafName(), fieldType().value); - } - } - - @Override - public void reset() { - // NOOP - } - - @Override - public String fieldName() { - return fullPath(); - } - }; + /* + If there was no value in the document, synthetic source should not have the value too. + This is consistent with stored source behavior and is important for scenarios + like reindexing into an index that has a different value of this value in the mapping. - return new SyntheticSourceSupport.Native(loader); + In order to do that we use fallback logic which implements exactly such logic (_source only contains value + if it was in the original document). + */ + return new SyntheticSourceSupport.Fallback(); } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index 4661fe77e8b11..2b9170afdfd70 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -333,6 +333,17 @@ public void testNullValueSyntheticSource() throws IOException { assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); } + public void testNoValueInDocumentSyntheticSource() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + b.field("type", "constant_keyword"); + b.field("value", randomAlphaOfLength(5)); + b.endObject(); + })).documentMapper(); + + assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); + } + @Override protected boolean supportsEmptyInputArray() { return false; From 4fd2e59ed40a39460efa04ad2195ba62400b0bb3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:24:28 +1100 Subject: [PATCH 112/386] Mute org.elasticsearch.xpack.inference.DefaultEndPointsIT testMultipleInferencesTriggeringDownloadAndDeploy #117208 --- muted-tests.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/muted-tests.yml b/muted-tests.yml index 2216873a3a265..cd0b9d91d4833 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -246,7 +246,9 @@ tests: - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT method: test {yaml=/10_apm/Test template reinstallation} issue: https://github.com/elastic/elasticsearch/issues/116445 - +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testMultipleInferencesTriggeringDownloadAndDeploy + issue: https://github.com/elastic/elasticsearch/issues/117208 # Examples: # From 6fb7fe74d50c60ff2ff42e29d8640f8430861f01 Mon Sep 17 
00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:30:15 +1100 Subject: [PATCH 113/386] Add 8.17 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index e81d511a88458..0e23a795664dd 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "8.17" + }, { "branch": "8.x" }, From 2ec1d48a083aa3d9580176b5d6d94d5194248ca7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:38:28 +1100 Subject: [PATCH 114/386] Mute org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT testEsqlSource #117212 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index cd0b9d91d4833..0331f705951f1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -249,6 +249,9 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/117208 +- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT + method: testEsqlSource + issue: https://github.com/elastic/elasticsearch/issues/117212 # Examples: # From 4297e9197bdd71b523229df3a90135d489f084a6 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Nov 2024 22:55:52 +0000 Subject: [PATCH 115/386] Clean up `TransportRemoteClusterStatsAction` (#117119) No need to have an `ActionType<>` here since we never register this as an action the `Client` can invoke. Also no need to use a dummy constructor parameter just to trick the injector into instantiating it, we can instantiate it ourselves like we do with all other subsidiary transport-only actions. Also fixes the parent task so the remote action is a child of the local action rather than a sibling. 
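The child-task fix uses the standard wiring, visible in the diff below:
wrapping the client in a `ParentTaskAssigningClient` stamps every request
sent through it with the current task as its parent, so task listing and
cancellation treat the remote stats call as a child of the local action. A
minimal sketch of the idiom (response mapping elided, otherwise nothing
here beyond what the patch itself uses):

    // inside doExecute(Task task, ...): requests sent through this client
    // are registered as children of `task`
    new ParentTaskAssigningClient(client, transportService.getLocalNode(), task).execute(
        TransportClusterStatsAction.TYPE,
        ClusterStatsRequest.newRemoteClusterStatsRequest(),
        listener
    );

Constructing the handler directly in the local action's constructor, rather
than registering an `ActionType` with the injector, keeps it reachable only
over the transport layer.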
--- .../cluster/stats/ClusterStatsRequest.java | 7 ++++--- .../stats/TransportClusterStatsAction.java | 10 ++++++---- .../TransportRemoteClusterStatsAction.java | 18 +++++++++--------- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index d8db2c5e657b4..ce9b48666d6ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -48,9 +48,10 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "", parentTaskId, headers); } - public ClusterStatsRequest asRemoteStats() { - this.remoteStats = true; - return this; + public static ClusterStatsRequest newRemoteClusterStatsRequest() { + final var request = new ClusterStatsRequest(); + request.remoteStats = true; + return request; } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 36b018b5002eb..97585ea9a1024 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterSnapshotStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -108,20 +109,19 @@ public class TransportClusterStatsAction extends TransportNodesAction< private final MetadataStatsCache mappingStatsCache; private final MetadataStatsCache analysisStatsCache; private final RemoteClusterService remoteClusterService; - private final TransportRemoteClusterStatsAction remoteClusterStatsAction; @Inject public TransportClusterStatsAction( ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + Client client, NodeService nodeService, IndicesService indicesService, RepositoriesService repositoriesService, UsageService usageService, ActionFilters actionFilters, - Settings settings, - TransportRemoteClusterStatsAction remoteClusterStatsAction + Settings settings ) { super( TYPE.name(), @@ -141,7 +141,9 @@ public TransportClusterStatsAction( this.analysisStatsCache = new MetadataStatsCache<>(threadPool.getThreadContext(), AnalysisStats::of); this.remoteClusterService = transportService.getRemoteClusterService(); this.settings = settings; - this.remoteClusterStatsAction = remoteClusterStatsAction; + + // register remote-cluster action with transport service only and not as a local-node Action that the Client can invoke + new TransportRemoteClusterStatsAction(client, transportService, actionFilters); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java index 4d57f10807af6..882aaa8b18e15 100644 
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java @@ -10,11 +10,11 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,26 +27,26 @@ public class TransportRemoteClusterStatsAction extends HandledTransportAction { public static final String NAME = "cluster:monitor/stats/remote"; - public static final ActionType TYPE = new ActionType<>(NAME); public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( NAME, RemoteClusterStatsResponse::new ); - private final NodeClient client; + + private final Client client; + private final TransportService transportService; @Inject - public TransportRemoteClusterStatsAction(NodeClient client, TransportService transportService, ActionFilters actionFilters) { + public TransportRemoteClusterStatsAction(Client client, TransportService transportService, ActionFilters actionFilters) { super(NAME, transportService, actionFilters, RemoteClusterStatsRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.client = client; + this.transportService = transportService; } @Override protected void doExecute(Task task, RemoteClusterStatsRequest request, ActionListener listener) { - ClusterStatsRequest subRequest = new ClusterStatsRequest().asRemoteStats(); - subRequest.setParentTask(request.getParentTask()); - client.execute( + new ParentTaskAssigningClient(client, transportService.getLocalNode(), task).execute( TransportClusterStatsAction.TYPE, - subRequest, + ClusterStatsRequest.newRemoteClusterStatsRequest(), listener.map( clusterStatsResponse -> new RemoteClusterStatsResponse( clusterStatsResponse.getClusterUUID(), From 4f46924f36640e684abccfbc3656b7335908136e Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 20 Nov 2024 15:05:42 -0800 Subject: [PATCH 116/386] Split plugin loading into two different phases to support entitlements (#116998) This change loads all the modules and creates the module layers for plugins prior to entitlement checking during the 2nd phase of bootstrap initialization. This will allow us to know what modules exist for both validation and checking prior to actually loading any plugin classes (in a follow up change). 
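Concretely, phase 2 of bootstrap now resolves the plugin bundles and builds
the module layers before entitlements are bootstrapped, and phase 3 hands
the prepared loader to the node. Roughly, as in the diff below:

    // initPhase2: load plugin Java modules/layers for later use by entitlements
    bootstrap.setPluginsLoader(new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()));

    // initPhase3: instantiate plugin main classes from the prepared layers
    Node node = new Node(bootstrap.environment(), bootstrap.pluginsLoader());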
There are now two classes: PluginsLoader which does the module loading and layer creation PluginsService which uses a PluginsLoader to create the main plugin classes and start the plugins --- .../script/ScriptScoreBenchmark.java | 4 +- .../elasticsearch/bootstrap/Bootstrap.java | 12 + .../bootstrap/Elasticsearch.java | 6 +- .../java/org/elasticsearch/node/Node.java | 5 +- .../elasticsearch/node/NodeConstruction.java | 8 +- .../node/NodeServiceProvider.java | 5 +- .../elasticsearch/plugins/PluginsLoader.java | 461 ++++++++++++++++++ .../elasticsearch/plugins/PluginsService.java | 425 ++-------------- .../plugins/PluginsLoaderTests.java | 31 ++ .../plugins/PluginsServiceTests.java | 23 +- .../java/org/elasticsearch/node/MockNode.java | 8 +- .../plugins/MockPluginsService.java | 18 +- .../bench/WatcherScheduleEngineBenchmark.java | 22 +- 13 files changed, 591 insertions(+), 437 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java create mode 100644 server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 3790be5f279d1..d44586ef4901a 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.DocReader; @@ -76,8 +77,7 @@ public class ScriptScoreBenchmark { private final PluginsService pluginsService = new PluginsService( Settings.EMPTY, null, - null, - Path.of(System.getProperty("plugins.dir")) + new PluginsLoader(null, Path.of(System.getProperty("plugins.dir"))) ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 699198a8e22c2..56d185645e149 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginsLoader; import java.io.PrintStream; @@ -42,6 +43,9 @@ class Bootstrap { // the loaded settings for the node, not valid until after phase 2 of initialization private final SetOnce nodeEnv = new SetOnce<>(); + // loads information about plugins required for entitlements in phase 2, used by plugins service in phase 3 + private final SetOnce pluginsLoader = new SetOnce<>(); + Bootstrap(PrintStream out, PrintStream err, ServerArgs args) { this.out = out; this.err = err; @@ -72,6 +76,14 @@ Environment environment() { return nodeEnv.get(); } + void setPluginsLoader(PluginsLoader pluginsLoader) { + this.pluginsLoader.set(pluginsLoader); + } + + PluginsLoader pluginsLoader() { + return pluginsLoader.get(); + } + void 
exitWithNodeValidationException(NodeValidationException e) { Logger logger = LogManager.getLogger(Elasticsearch.class); logger.error("node validation exception\n{}", e.getMessage()); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 2a83f749e7d33..77875e65ab9b8 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -41,6 +41,7 @@ import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginsLoader; import java.io.IOException; import java.io.InputStream; @@ -199,6 +200,9 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { VectorUtil.class ); + // load the plugin Java modules and layers now for use in entitlements + bootstrap.setPluginsLoader(new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile())); + if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { logger.info("Bootstrapping Entitlements"); EntitlementBootstrap.bootstrap(); @@ -244,7 +248,7 @@ private static void ensureInitialized(Class... classes) { private static void initPhase3(Bootstrap bootstrap) throws IOException, NodeValidationException { checkLucene(); - Node node = new Node(bootstrap.environment()) { + Node node = new Node(bootstrap.environment(), bootstrap.pluginsLoader()) { @Override protected void validateNodeBeforeAcceptingRequests( final BootstrapContext context, diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index ec4a534fc883b..80c9aafaa84b4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -69,6 +69,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.repositories.RepositoriesService; @@ -180,8 +181,8 @@ public class Node implements Closeable { * * @param environment the initial environment for this node, which will be added to by plugins */ - public Node(Environment environment) { - this(NodeConstruction.prepareConstruction(environment, new NodeServiceProvider(), true)); + public Node(Environment environment, PluginsLoader pluginsLoader) { + this(NodeConstruction.prepareConstruction(environment, pluginsLoader, new NodeServiceProvider(), true)); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index caf65c05cf27d..e1fc586424dec 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -164,6 +164,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.RecoveryPlannerPlugin; import org.elasticsearch.plugins.ReloadablePlugin; @@ -260,6 +261,7 @@ class NodeConstruction { */ static NodeConstruction 
prepareConstruction( Environment initialEnvironment, + PluginsLoader pluginsLoader, NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings ) { @@ -267,7 +269,7 @@ static NodeConstruction prepareConstruction( try { NodeConstruction constructor = new NodeConstruction(closeables); - Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); + Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider, pluginsLoader); constructor.loadLoggingDataProviders(); TelemetryProvider telemetryProvider = constructor.createTelemetryProvider(settings); ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); @@ -400,7 +402,7 @@ private static Optional getSinglePlugin(Stream plugins, Class plugi return Optional.of(plugin); } - private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider) { + private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider, PluginsLoader pluginsLoader) { // Pass the node settings to the DeprecationLogger class so that it can have the deprecation.skip_deprecated_settings setting: Settings envSettings = initialEnvironment.settings(); DeprecationLogger.initialize(envSettings); @@ -473,7 +475,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr (e, apmConfig) -> logger.error("failed to delete temporary APM config file [{}], reason: [{}]", apmConfig, e.getMessage()) ); - pluginsService = serviceProvider.newPluginService(initialEnvironment, envSettings); + pluginsService = serviceProvider.newPluginService(initialEnvironment, pluginsLoader); modules.bindToInstance(PluginsService.class, pluginsService); Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), envSettings); diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index f18655afb8f02..8f2dc4e532ae0 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -27,6 +27,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.script.ScriptContext; @@ -51,9 +52,9 @@ */ class NodeServiceProvider { - PluginsService newPluginService(Environment environment, Settings settings) { + PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java new file mode 100644 index 0000000000000..6b3eda6c0c9b4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -0,0 +1,461 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.ModuleQualifiedExportsService; + +import java.io.IOException; +import java.lang.ModuleLayer.Controller; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; + +/** + * This class is used to load modules and module layers for each plugin during + * node initialization prior to enablement of entitlements. This allows entitlements + * to have all the plugin information they need prior to starting. + */ +public class PluginsLoader { + + /** + * Contains information about the {@link ClassLoader} required to load a plugin + */ + public interface PluginLayer { + /** + * @return Information about the bundle of jars used in this plugin + */ + PluginBundle pluginBundle(); + + /** + * @return The {@link ClassLoader} used to instantiate the main class for the plugin + */ + ClassLoader pluginClassLoader(); + } + + /** + * Contains information about the {@link ClassLoader}s and {@link ModuleLayer} required for loading a plugin + * @param pluginBundle Information about the bundle of jars used in this plugin + * @param pluginClassLoader The {@link ClassLoader} used to instantiate the main class for the plugin + * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules + * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules + */ + private record LoadedPluginLayer( + PluginBundle pluginBundle, + ClassLoader pluginClassLoader, + ClassLoader spiClassLoader, + ModuleLayer spiModuleLayer + ) implements PluginLayer { + + public LoadedPluginLayer { + Objects.requireNonNull(pluginBundle); + Objects.requireNonNull(pluginClassLoader); + Objects.requireNonNull(spiClassLoader); + Objects.requireNonNull(spiModuleLayer); + } + } + + /** + * Tuple of module layer and loader. + * Modular Plugins have a plugin specific loader and layer. + * Non-Modular plugins have a plugin specific loader and the boot layer. 
+ */ + public record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { + + public LayerAndLoader { + Objects.requireNonNull(layer); + Objects.requireNonNull(loader); + } + + public static LayerAndLoader ofLoader(ClassLoader loader) { + return new LayerAndLoader(ModuleLayer.boot(), loader); + } + } + + private static final Logger logger = LogManager.getLogger(PluginsLoader.class); + private static final Module serverModule = PluginsLoader.class.getModule(); + + private final List moduleDescriptors; + private final List pluginDescriptors; + private final Map loadedPluginLayers; + + /** + * Constructs a new PluginsLoader + * + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem + * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + */ + @SuppressWarnings("this-escape") + public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + + Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); + addServerExportsService(qualifiedExports); + + Set seenBundles = new LinkedHashSet<>(); + + // load (elasticsearch) module layers + if (modulesDirectory != null) { + try { + Set modules = PluginsUtils.getModuleBundles(modulesDirectory); + moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(modules); + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize modules", ex); + } + } else { + moduleDescriptors = Collections.emptyList(); + } + + // load plugin layers + if (pluginsDirectory != null) { + try { + // TODO: remove this leniency, but tests bogusly rely on it + if (isAccessibleDirectory(pluginsDirectory, logger)) { + PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); + Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); + pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(plugins); + } else { + pluginDescriptors = Collections.emptyList(); + } + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize plugins", ex); + } + } else { + pluginDescriptors = Collections.emptyList(); + } + + this.loadedPluginLayers = Collections.unmodifiableMap(loadPluginLayers(seenBundles, qualifiedExports)); + } + + public List moduleDescriptors() { + return moduleDescriptors; + } + + public List pluginDescriptors() { + return pluginDescriptors; + } + + public Stream pluginLayers() { + return loadedPluginLayers.values().stream().map(Function.identity()); + } + + private Map loadPluginLayers( + Set bundles, + Map> qualifiedExports + ) { + Map loaded = new LinkedHashMap<>(); + Map> transitiveUrls = new HashMap<>(); + List sortedBundles = PluginsUtils.sortBundles(bundles); + if (sortedBundles.isEmpty() == false) { + Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); + for (PluginBundle bundle : sortedBundles) { + PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); + loadPluginLayer(bundle, loaded, qualifiedExports); + } + } + + return loaded; + } + + private void loadPluginLayer( + PluginBundle bundle, + Map loaded, + Map> qualifiedExports + ) { + String name = bundle.plugin.getName(); + logger.debug(() -> "Loading bundle: " + name); + + PluginsUtils.verifyCompatibility(bundle.plugin); + + // collect the list of extended plugins + List extendedPlugins = new ArrayList<>(); + for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { + 
LoadedPluginLayer extendedPlugin = loaded.get(extendedPluginName); + assert extendedPlugin != null; + assert extendedPlugin.spiClassLoader() != null : "All non-classpath plugins should be loaded with a classloader"; + extendedPlugins.add(extendedPlugin); + } + + final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( + getClass().getClassLoader(), + extendedPlugins.stream().map(LoadedPluginLayer::spiClassLoader).toList() + ); + LayerAndLoader spiLayerAndLoader = null; + if (bundle.hasSPI()) { + spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); + } + + final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); + final LayerAndLoader pluginLayerAndLoader = createPlugin( + bundle, + pluginParentLoader, + extendedPlugins, + spiLayerAndLoader, + qualifiedExports + ); + final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); + + if (spiLayerAndLoader == null) { + // use full implementation for plugins extending this one + spiLayerAndLoader = pluginLayerAndLoader; + } + + loaded.put(name, new LoadedPluginLayer(bundle, pluginClassLoader, spiLayerAndLoader.loader, spiLayerAndLoader.layer)); + } + + static LayerAndLoader createSPI( + PluginBundle bundle, + ClassLoader parentLoader, + List extendedPlugins, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); + return createSpiModuleLayer( + bundle.spiUrls, + parentLoader, + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer).toList(), + qualifiedExports + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); + } + } + + static LayerAndLoader createPlugin( + PluginBundle bundle, + ClassLoader pluginParentLoader, + List extendedPlugins, + LayerAndLoader spiLayerAndLoader, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); + var parentLayers = Stream.concat( + Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer) + ).toList(); + return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); + } else if (plugin.isStable()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); + return LayerAndLoader.ofLoader( + UberModuleClassLoader.getInstance( + pluginParentLoader, + ModuleLayer.boot(), + "synthetic." 
+ toModuleName(plugin.getName()), + bundle.allUrls, + Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules + ) + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); + } + } + + static LayerAndLoader createSpiModuleLayer( + Set urls, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + // assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + null, // no entry point + spiModuleName(urls), + urlsToPaths(urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createPluginModuleLayer( + PluginBundle bundle, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + bundle.plugin.getClassname(), + bundle.plugin.getModuleName().get(), + urlsToPaths(bundle.urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createModuleLayer( + String className, + String moduleName, + Path[] paths, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); + var finder = ModuleFinder.of(paths); + + var configuration = Configuration.resolveAndBind( + ModuleFinder.of(), + parentConfigurationOrBoot(parentLayers), + finder, + Set.of(moduleName) + ); + var controller = privilegedDefineModulesWithOneLoader(configuration, parentLayersOrBoot(parentLayers), parentLoader); + var pluginModule = controller.layer().findModule(moduleName).get(); + ensureEntryPointAccessible(controller, pluginModule, className); + // export/open upstream modules to this plugin module + exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); + // configure qualified exports/opens to other modules/plugins + addPluginExportsServices(qualifiedExports, controller); + logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); + return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); + } + + /** Determines the module name of the SPI module, given its URL. 
*/ + static String spiModuleName(Set spiURLS) { + ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); + var mrefs = finder.findAll(); + assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; + return mrefs.stream().findFirst().get().descriptor().name(); + } + + // package-visible for testing + static String toModuleName(String name) { + String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots + .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start + .replaceAll("\\.$", "") // trim trailing dot + .toLowerCase(Locale.getDefault()); + assert ModuleSupport.isPackageName(result); + return result; + } + + static final String toPackageName(String className) { + assert className.endsWith(".") == false; + int index = className.lastIndexOf('.'); + if (index == -1) { + throw new IllegalStateException("invalid class name:" + className); + } + return className.substring(0, index); + } + + @SuppressForbidden(reason = "I need to convert URL's to Paths") + static final Path[] urlsToPaths(Set urls) { + return urls.stream().map(PluginsLoader::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); + } + + static final URI uncheckedToURI(URL url) { + try { + return url.toURI(); + } catch (URISyntaxException e) { + throw new AssertionError(new IOException(e)); + } + } + + private static List parentConfigurationOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot().configuration()); + } else { + return parentLayers.stream().map(ModuleLayer::configuration).toList(); + } + } + + /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ + private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { + if (className != null) { + controller.addOpens(pluginModule, toPackageName(className), serverModule); + } + } + + @SuppressWarnings("removal") + static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { + return AccessController.doPrivileged( + (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) + ); + } + + @SuppressWarnings("removal") + static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { + return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); + } + + private static List parentLayersOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot()); + } else { + return parentLayers; + } + } + + protected void addServerExportsService(Map> qualifiedExports) { + var exportsService = new ModuleQualifiedExportsService(serverModule) { + @Override + protected void addExports(String pkg, Module target) { + serverModule.addExports(pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + serverModule.addOpens(pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, serverModule.getName()); + } + + private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { + for (Module module : controller.layer().modules()) { + var exportsService = new ModuleQualifiedExportsService(module) { + @Override + protected void addExports(String pkg, Module target) { + controller.addExports(module, pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + 
controller.addOpens(module, pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, module.getName()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index d5dd6d62d615e..cfdb7aaf0b509 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -23,34 +23,22 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; -import org.elasticsearch.jdk.JarHell; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.plugins.PluginsLoader.PluginLayer; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.plugins.spi.SPIClassIterator; import java.io.IOException; -import java.lang.ModuleLayer.Controller; -import java.lang.module.Configuration; -import java.lang.module.ModuleFinder; import java.lang.reflect.Constructor; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -63,10 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; - public class PluginsService implements ReportingService { public StablePluginsRegistry getStablePluginRegistry() { @@ -77,33 +61,18 @@ public StablePluginsRegistry getStablePluginRegistry() { * A loaded plugin is one for which Elasticsearch has successfully constructed an instance of the plugin's class * @param descriptor Metadata about the plugin, usually loaded from plugin properties * @param instance The constructed instance of the plugin's main class - * @param loader The classloader for the plugin - * @param layer The module layer for the plugin */ - record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader loader, ModuleLayer layer) { + record LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { LoadedPlugin { Objects.requireNonNull(descriptor); Objects.requireNonNull(instance); - Objects.requireNonNull(loader); - Objects.requireNonNull(layer); - } - - /** - * Creates a loaded classpath plugin. A classpath plugin is a plugin loaded - * by the system classloader and defined to the unnamed module of the boot layer. 
- */ - LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { - this(descriptor, instance, PluginsService.class.getClassLoader(), ModuleLayer.boot()); } } private static final Logger logger = LogManager.getLogger(PluginsService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(PluginsService.class); - private final Settings settings; - private final Path configPath; - /** * We keep around a list of plugins and modules. The order of * this list is that which the plugins and modules were loaded in. @@ -117,69 +86,32 @@ record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader lo /** * Constructs a new PluginService * - * @param settings The settings of the system - * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem - * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + * @param settings The settings for this node + * @param configPath The configuration path for this node + * @param pluginsLoader the information required to complete loading of plugins */ - @SuppressWarnings("this-escape") - public PluginsService(Settings settings, Path configPath, Path modulesDirectory, Path pluginsDirectory) { - this.settings = settings; - this.configPath = configPath; - - Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); - addServerExportsService(qualifiedExports); - - Set seenBundles = new LinkedHashSet<>(); - - // load modules - List modulesList = new ArrayList<>(); - Set moduleNameList = new HashSet<>(); - if (modulesDirectory != null) { - try { - Set modules = PluginsUtils.getModuleBundles(modulesDirectory); - modules.stream().map(PluginBundle::pluginDescriptor).forEach(m -> { - modulesList.add(m); - moduleNameList.add(m.getName()); - }); - seenBundles.addAll(modules); - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize modules", ex); - } - } + public PluginsService(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = loadPluginBundles(settings, configPath, pluginsLoader); - // load plugins - List pluginsList = new ArrayList<>(); - if (pluginsDirectory != null) { - try { - // TODO: remove this leniency, but tests bogusly rely on it - if (isAccessibleDirectory(pluginsDirectory, logger)) { - PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); - Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); - plugins.stream().map(PluginBundle::pluginDescriptor).forEach(pluginsList::add); - seenBundles.addAll(plugins); - } - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize plugins", ex); - } - } - - LinkedHashMap loadedPlugins = loadBundles(seenBundles, qualifiedExports); + var modulesDescriptors = pluginsLoader.moduleDescriptors(); + var pluginDescriptors = pluginsLoader.pluginDescriptors(); var inspector = PluginIntrospector.getInstance(); - this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginsList, loadedPlugins), modulesList); + this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginDescriptors, loadedPlugins), modulesDescriptors); this.plugins = List.copyOf(loadedPlugins.values()); - checkDeprecations(inspector, pluginsList, loadedPlugins); + checkDeprecations(inspector, pluginDescriptors, loadedPlugins); checkMandatoryPlugins( - pluginsList.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), + 
pluginDescriptors.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), new HashSet<>(MANDATORY_SETTING.get(settings)) ); // we don't log jars in lib/ we really shouldn't log modules, // but for now: just be transparent so we can debug any potential issues + Set moduleNames = new HashSet<>(modulesDescriptors.stream().map(PluginDescriptor::getName).toList()); for (String name : loadedPlugins.keySet()) { - if (moduleNameList.contains(name)) { + if (moduleNames.contains(name)) { logger.info("loaded module [{}]", name); } else { logger.info("loaded plugin [{}]", name); @@ -282,23 +214,11 @@ protected List plugins() { return this.plugins; } - private LinkedHashMap loadBundles( - Set bundles, - Map> qualifiedExports - ) { - LinkedHashMap loaded = new LinkedHashMap<>(); - Map> transitiveUrls = new HashMap<>(); - List sortedBundles = PluginsUtils.sortBundles(bundles); - if (sortedBundles.isEmpty() == false) { - Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); - for (PluginBundle bundle : sortedBundles) { - PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadBundle(bundle, loaded, qualifiedExports); - } - } - - loadExtensions(loaded.values()); - return loaded; + private Map loadPluginBundles(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = new LinkedHashMap<>(); + pluginsLoader.pluginLayers().forEach(pl -> loadBundle(pl, loadedPlugins, settings, configPath)); + loadExtensions(loadedPlugins.values()); + return loadedPlugins; } // package-private for test visibility @@ -443,68 +363,43 @@ private static String extensionConstructorMessage(Class extensi return "constructor for extension [" + extensionClass.getName() + "] of type [" + extensionPointType.getName() + "]"; } - private void loadBundle( - PluginBundle bundle, - Map loaded, - Map> qualifiedExports - ) { - String name = bundle.plugin.getName(); - logger.debug(() -> "Loading bundle: " + name); - - PluginsUtils.verifyCompatibility(bundle.plugin); + private void loadBundle(PluginLayer pluginLayer, Map loadedPlugins, Settings settings, Path configPath) { + String name = pluginLayer.pluginBundle().plugin.getName(); + logger.debug(() -> "Loading plugin bundle: " + name); - // collect the list of extended plugins + // validate the list of extended plugins List extendedPlugins = new ArrayList<>(); - for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { - LoadedPlugin extendedPlugin = loaded.get(extendedPluginName); + for (String extendedPluginName : pluginLayer.pluginBundle().plugin.getExtendedPlugins()) { + LoadedPlugin extendedPlugin = loadedPlugins.get(extendedPluginName); assert extendedPlugin != null; if (ExtensiblePlugin.class.isInstance(extendedPlugin.instance()) == false) { throw new IllegalStateException("Plugin [" + name + "] cannot extend non-extensible plugin [" + extendedPluginName + "]"); } - assert extendedPlugin.loader() != null : "All non-classpath plugins should be loaded with a classloader"; extendedPlugins.add(extendedPlugin); logger.debug( - () -> "Loading bundle: " + name + ", ext plugins: " + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() + () -> "Loading plugin bundle: " + + name + + ", ext plugins: " + + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() ); } - final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( - getClass().getClassLoader(), - extendedPlugins.stream().map(LoadedPlugin::loader).toList() - ); - LayerAndLoader spiLayerAndLoader = null; - if 
(bundle.hasSPI()) { - spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); - } - - final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); - final LayerAndLoader pluginLayerAndLoader = createPlugin( - bundle, - pluginParentLoader, - extendedPlugins, - spiLayerAndLoader, - qualifiedExports - ); - final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); - - if (spiLayerAndLoader == null) { - // use full implementation for plugins extending this one - spiLayerAndLoader = pluginLayerAndLoader; - } + PluginBundle pluginBundle = pluginLayer.pluginBundle(); + ClassLoader pluginClassLoader = pluginLayer.pluginClassLoader(); // reload SPI with any new services from the plugin - reloadLuceneSPI(pluginClassLoader); + reloadLuceneSPI(pluginLayer.pluginClassLoader()); ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { // Set context class loader to plugin's class loader so that plugins // that have dependencies with their own SPI endpoints have a chance to load // and initialize them appropriately. - privilegedSetContextClassLoader(pluginClassLoader); + privilegedSetContextClassLoader(pluginLayer.pluginClassLoader()); Plugin plugin; - if (bundle.pluginDescriptor().isStable()) { - stablePluginsRegistry.scanBundleForStablePlugins(bundle, pluginClassLoader); + if (pluginBundle.pluginDescriptor().isStable()) { + stablePluginsRegistry.scanBundleForStablePlugins(pluginBundle, pluginClassLoader); /* Contrary to old plugins we don't need an instance of the plugin here. Stable plugin register components (like CharFilterFactory) in stable plugin registry, which is then used in AnalysisModule @@ -514,16 +409,16 @@ Stable plugin register components (like CharFilterFactory) in stable plugin regi We need to pass a name though so that we can show that a plugin was loaded (via cluster state api) This might need to be revisited once support for settings is added */ - plugin = new StablePluginPlaceHolder(bundle.plugin.getName()); + plugin = new StablePluginPlaceHolder(pluginBundle.plugin.getName()); } else { - Class pluginClass = loadPluginClass(bundle.plugin.getClassname(), pluginClassLoader); + Class pluginClass = loadPluginClass(pluginBundle.plugin.getClassname(), pluginClassLoader); if (pluginClassLoader != pluginClass.getClassLoader()) { throw new IllegalStateException( "Plugin [" + name + "] must reference a class loader local Plugin class [" - + bundle.plugin.getClassname() + + pluginBundle.plugin.getClassname() + "] (class loader [" + pluginClass.getClassLoader() + "])" @@ -531,75 +426,12 @@ We need to pass a name though so that we can show that a plugin was loaded (via } plugin = loadPlugin(pluginClass, settings, configPath); } - loaded.put(name, new LoadedPlugin(bundle.plugin, plugin, spiLayerAndLoader.loader(), spiLayerAndLoader.layer())); + loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin)); } finally { privilegedSetContextClassLoader(cl); } } - static LayerAndLoader createSPI( - PluginBundle bundle, - ClassLoader parentLoader, - List extendedPlugins, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); - return createSpiModuleLayer( - bundle.spiUrls, - parentLoader, - extendedPlugins.stream().map(LoadedPlugin::layer).toList(), - qualifiedExports - ); - } else { - logger.debug(() -> "Loading bundle: " + 
plugin.getName() + ", creating spi, non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); - } - } - - static LayerAndLoader createPlugin( - PluginBundle bundle, - ClassLoader pluginParentLoader, - List extendedPlugins, - LayerAndLoader spiLayerAndLoader, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); - var parentLayers = Stream.concat( - Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), - extendedPlugins.stream().map(LoadedPlugin::layer) - ).toList(); - return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); - } else if (plugin.isStable()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); - return LayerAndLoader.ofLoader( - UberModuleClassLoader.getInstance( - pluginParentLoader, - ModuleLayer.boot(), - "synthetic." + toModuleName(plugin.getName()), - bundle.allUrls, - Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules - ) - ); - } else { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); - } - } - - // package-visible for testing - static String toModuleName(String name) { - String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots - .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start - .replaceAll("\\.$", "") // trim trailing dot - .toLowerCase(Locale.getDefault()); - assert ModuleSupport.isPackageName(result); - return result; - } - private static void checkDeprecations( PluginIntrospector inspector, List pluginDescriptors, @@ -706,173 +538,6 @@ public final Stream filterPlugins(Class type) { return plugins().stream().filter(x -> type.isAssignableFrom(x.instance().getClass())).map(p -> ((T) p.instance())); } - static LayerAndLoader createPluginModuleLayer( - PluginBundle bundle, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - bundle.plugin.getClassname(), - bundle.plugin.getModuleName().get(), - urlsToPaths(bundle.urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - static final LayerAndLoader createSpiModuleLayer( - Set urls, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - // assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - null, // no entry point - spiModuleName(urls), - urlsToPaths(urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - private static final Module serverModule = PluginsService.class.getModule(); - - static LayerAndLoader createModuleLayer( - String className, - String moduleName, - Path[] paths, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); - var finder = ModuleFinder.of(paths); - - var configuration = Configuration.resolveAndBind( - ModuleFinder.of(), - parentConfigurationOrBoot(parentLayers), - finder, - Set.of(moduleName) - ); - var controller = privilegedDefineModulesWithOneLoader(configuration, 
parentLayersOrBoot(parentLayers), parentLoader); - var pluginModule = controller.layer().findModule(moduleName).get(); - ensureEntryPointAccessible(controller, pluginModule, className); - // export/open upstream modules to this plugin module - exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); - // configure qualified exports/opens to other modules/plugins - addPluginExportsServices(qualifiedExports, controller); - logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); - return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); - } - - private static List parentLayersOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot()); - } else { - return parentLayers; - } - } - - private static List parentConfigurationOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot().configuration()); - } else { - return parentLayers.stream().map(ModuleLayer::configuration).toList(); - } - } - - /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ - private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { - if (className != null) { - controller.addOpens(pluginModule, toPackageName(className), serverModule); - } - } - - protected void addServerExportsService(Map> qualifiedExports) { - final Module serverModule = PluginsService.class.getModule(); - var exportsService = new ModuleQualifiedExportsService(serverModule) { - @Override - protected void addExports(String pkg, Module target) { - serverModule.addExports(pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - serverModule.addOpens(pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, serverModule.getName()); - } - - private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { - for (Module module : controller.layer().modules()) { - var exportsService = new ModuleQualifiedExportsService(module) { - @Override - protected void addExports(String pkg, Module target) { - controller.addExports(module, pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - controller.addOpens(module, pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, module.getName()); - } - } - - /** Determines the module name of the SPI module, given its URL. */ - static String spiModuleName(Set spiURLS) { - ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); - var mrefs = finder.findAll(); - assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; - return mrefs.stream().findFirst().get().descriptor().name(); - } - - /** - * Tuple of module layer and loader. - * Modular Plugins have a plugin specific loader and layer. - * Non-Modular plugins have a plugin specific loader and the boot layer. 
- */ - record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { - - LayerAndLoader { - Objects.requireNonNull(layer); - Objects.requireNonNull(loader); - } - - static LayerAndLoader ofLoader(ClassLoader loader) { - return new LayerAndLoader(ModuleLayer.boot(), loader); - } - } - - @SuppressForbidden(reason = "I need to convert URL's to Paths") - static final Path[] urlsToPaths(Set urls) { - return urls.stream().map(PluginsService::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); - } - - static final URI uncheckedToURI(URL url) { - try { - return url.toURI(); - } catch (URISyntaxException e) { - throw new AssertionError(new IOException(e)); - } - } - - static final String toPackageName(String className) { - assert className.endsWith(".") == false; - int index = className.lastIndexOf('.'); - if (index == -1) { - throw new IllegalStateException("invalid class name:" + className); - } - return className.substring(0, index); - } - @SuppressWarnings("removal") private static void privilegedSetContextClassLoader(ClassLoader loader) { AccessController.doPrivileged((PrivilegedAction) () -> { @@ -880,16 +545,4 @@ private static void privilegedSetContextClassLoader(ClassLoader loader) { return null; }); } - - @SuppressWarnings("removal") - static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) - ); - } - - @SuppressWarnings("removal") - static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { - return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); - } } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java new file mode 100644 index 0000000000000..059cb15551acb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class PluginsLoaderTests extends ESTestCase { + + public void testToModuleName() { + assertThat(PluginsLoader.toModuleName("module.name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name1"), equalTo("module.name1")); + assertThat(PluginsLoader.toModuleName("1module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module!@#name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("!module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module_name"), equalTo("module_name")); + assertThat(PluginsLoader.toModuleName("-module-name-"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("_module_name"), equalTo("_module_name")); + assertThat(PluginsLoader.toModuleName("_"), equalTo("_")); + } +} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index f927a12b50da3..b84f1d2c7635c 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -66,12 +66,12 @@ public class PluginsServiceTests extends ESTestCase { public static class FilterablePlugin extends Plugin implements ScriptPlugin {} static PluginsService newPluginsService(Settings settings) { - return new PluginsService(settings, null, null, TestEnvironment.newEnvironment(settings).pluginsFile()) { + return new PluginsService(settings, null, new PluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile()) { @Override protected void addServerExportsService(Map> qualifiedExports) { // tests don't run modular } - }; + }); } static PluginsService newMockPluginsService(List> classpathPlugins) { @@ -875,20 +875,6 @@ public void testCanCreateAClassLoader() { assertEquals(this.getClass().getClassLoader(), loader.getParent()); } - public void testToModuleName() { - assertThat(PluginsService.toModuleName("module.name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name1"), equalTo("module.name1")); - assertThat(PluginsService.toModuleName("1module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module!@#name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("!module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module_name"), equalTo("module_name")); - assertThat(PluginsService.toModuleName("-module-name-"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("_module_name"), equalTo("_module_name")); - assertThat(PluginsService.toModuleName("_"), equalTo("_")); - } - static final class Loader extends ClassLoader { Loader(ClassLoader parent) { super(parent); @@ -896,16 +882,17 @@ static final class Loader extends ClassLoader { } // Closes the URLClassLoaders and UberModuleClassloaders of plugins loaded by the given plugin service. 
+ // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. static void closePluginLoaders(PluginsService pluginService) { for (var lp : pluginService.plugins()) { - if (lp.loader() instanceof URLClassLoader urlClassLoader) { + if (lp.instance().getClass().getClassLoader() instanceof URLClassLoader urlClassLoader) { try { PrivilegedOperations.closeURLClassLoader(urlClassLoader); } catch (IOException unexpected) { throw new UncheckedIOException(unexpected); } } - if (lp.loader() instanceof UberModuleClassLoader loader) { + if (lp.instance().getClass().getClassLoader() instanceof UberModuleClassLoader loader) { try { PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); } catch (Exception e) { diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 40fb4f91c77d0..38c7b1eb04772 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -31,6 +31,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.MockPluginsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; @@ -279,10 +280,11 @@ private MockNode( final Collection> classpathPlugins, final boolean forbidPrivateIndexSettings ) { - super(NodeConstruction.prepareConstruction(environment, new MockServiceProvider() { + super(NodeConstruction.prepareConstruction(environment, null, new MockServiceProvider() { + @Override - PluginsService newPluginService(Environment environment, Settings settings) { - return new MockPluginsService(settings, environment, classpathPlugins); + PluginsService newPluginService(Environment environment, PluginsLoader pluginsLoader) { + return new MockPluginsService(environment.settings(), environment, classpathPlugins); } }, forbidPrivateIndexSettings)); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index e4734f9cf021e..d51b2cfb450bc 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -20,7 +20,6 @@ import org.elasticsearch.plugins.spi.SPIClassIterator; import java.lang.reflect.Constructor; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -44,14 +43,18 @@ public class MockPluginsService extends PluginsService { * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ public MockPluginsService(Settings settings, Environment environment, Collection> classpathPlugins) { - super(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + super(settings, environment.configFile(), new PluginsLoader(environment.modulesFile(), environment.pluginsFile()) { - final Path configPath = environment.configFile(); + @Override + protected void addServerExportsService(Map> qualifiedExports) { + // tests don't run modular + } + }); List pluginsLoaded = new ArrayList<>(); for (Class pluginClass : classpathPlugins) { - Plugin plugin = loadPlugin(pluginClass, 
settings, configPath); + Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile()); PluginDescriptor pluginInfo = new PluginDescriptor( pluginClass.getName(), "classpath plugin", @@ -69,7 +72,7 @@ public MockPluginsService(Settings settings, Environment environment, Collection if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } - pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin, pluginClass.getClassLoader(), ModuleLayer.boot())); + pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin)); } loadExtensions(pluginsLoaded); this.classpathPlugins = List.copyOf(pluginsLoaded); @@ -169,9 +172,4 @@ private static List createExtensions( } return extensions; } - - @Override - protected void addServerExportsService(Map> qualifiedExports) { - // tests don't run modular - } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 1691a464d8061..99fb626ad9474 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -16,11 +16,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -96,18 +98,18 @@ public static void main(String[] args) throws Exception { ); System.out.println("and heap_max=" + JvmInfo.jvmInfo().getMem().getHeapMax()); + Environment internalNodeEnv = InternalSettingsPreparer.prepareEnvironment( + Settings.builder().put(SETTINGS).put("node.data", false).build(), + emptyMap(), + null, + () -> { + throw new IllegalArgumentException("settings must have [node.name]"); + } + ); + // First clean everything and index the watcher (but not via put alert api!) try ( - Node node = new Node( - InternalSettingsPreparer.prepareEnvironment( - Settings.builder().put(SETTINGS).put("node.data", false).build(), - emptyMap(), - null, - () -> { - throw new IllegalArgumentException("settings must have [node.name]"); - } - ) - ).start() + Node node = new Node(internalNodeEnv, new PluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())).start() ) { final Client client = node.client(); ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get(); From 4cc9f5de6c14bd7b96df30977e17995c389a1162 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 20 Nov 2024 16:09:46 -0700 Subject: [PATCH 117/386] Revert Remove direct cloning of BytesTransportRequests (#117200) Reverts #114808 and unmutes #117024 which was a related failure. 
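To make the restored behavior concrete, the round trip the mock transport performs can be sketched roughly as follows. This is a hedged illustration only: the class names (BytesStreamOutput, RequestHandlerRegistry, BytesTransportRequest) and the calls on them mirror the Elasticsearch test framework code shown in the diff below, but the standalone helper method itself is hypothetical and just isolates the control flow.

    // Sketch (hypothetical helper): clone a request by serializing it and reading it back.
    // BytesTransportRequest payloads are special-cased because some handlers (e.g. join validation)
    // read them back as a different class that cannot be re-serialized, so the raw bytes are copied
    // into a new BytesTransportRequest instead of going through the handler registry.
    static TransportRequest cloneRequest(TransportRequest request, RequestHandlerRegistry<TransportRequest> reg) throws IOException {
        BytesStreamOutput bStream = new BytesStreamOutput();
        request.writeTo(bStream);
        if (request instanceof BytesTransportRequest) {
            return new BytesTransportRequest(bStream.bytes().streamInput());
        }
        return reg.newRequest(bStream.bytes().streamInput());
    }
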
--- muted-tests.yml | 3 --- .../test/transport/MockTransportService.java | 20 +++++++++++++++++-- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0331f705951f1..710cdea8f1564 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -237,9 +237,6 @@ tests: - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116990 -- class: org.elasticsearch.discovery.ClusterDisruptionIT - method: testAckedIndexing - issue: https://github.com/elastic/elasticsearch/issues/117024 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-across-clusters/line_197} issue: https://github.com/elastic/elasticsearch/issues/117099 diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 18c591166e720..fd376fcd07688 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -39,6 +39,7 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; @@ -49,6 +50,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -584,8 +586,13 @@ public void sendRequest( // poor mans request cloning... BytesStreamOutput bStream = new BytesStreamOutput(); request.writeTo(bStream); - RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); - final TransportRequest clonedRequest = reg.newRequest(bStream.bytes().streamInput()); + final TransportRequest clonedRequest; + if (request instanceof BytesTransportRequest) { + clonedRequest = copyRawBytesForBwC(bStream); + } else { + RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); + clonedRequest = reg.newRequest(bStream.bytes().streamInput()); + } assert clonedRequest.getClass().equals(MasterNodeRequestHelper.unwrapTermOverride(request).getClass()) : clonedRequest + " vs " + request; @@ -633,6 +640,15 @@ protected void doRun() throws IOException { } } + // Some request handlers read back a BytesTransportRequest + // into a different class that cannot be re-serialized (i.e. JOIN_VALIDATE_ACTION_NAME), + // in those cases we just copy the raw bytes back to a BytesTransportRequest. 
+ // This is only needed for the BwC for JOIN_VALIDATE_ACTION_NAME and can be removed in the next major + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) + private static TransportRequest copyRawBytesForBwC(BytesStreamOutput bStream) throws IOException { + return new BytesTransportRequest(bStream.bytes().streamInput()); + } + @Override public void clearCallback() { synchronized (this) { From afa3abcec5968161cbb2aa126104ea8daad7c3d2 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Wed, 20 Nov 2024 17:34:44 -0600 Subject: [PATCH 118/386] Reindex data stream persistent task (#116780) --- .../ReindexDataStreamTransportActionIT.java | 152 +++++++++++++++++ .../datastreams/DataStreamsPlugin.java | 65 +++++++- .../ReindexDataStreamTransportAction.java | 93 +++++++++++ ...indexDataStreamPersistentTaskExecutor.java | 121 ++++++++++++++ .../ReindexDataStreamPersistentTaskState.java | 63 +++++++ .../task/ReindexDataStreamStatus.java | 95 +++++++++++ .../task/ReindexDataStreamTask.java | 86 ++++++++++ .../task/ReindexDataStreamTaskParams.java | 86 ++++++++++ ...dexDataStreamPersistentTaskStateTests.java | 38 +++++ .../task/ReindexDataStreamStatusTests.java | 157 ++++++++++++++++++ .../ReindexDataStreamTaskParamsTests.java | 70 ++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../datastreams/ReindexDataStreamAction.java | 119 +++++++++++++ .../ReindexDataStreamResponseTests.java | 52 ++++++ 14 files changed, 1197 insertions(+), 1 deletion(-) create mode 100644 modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java create mode 100644 modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java create mode 100644 modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java create mode 100644 modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java create mode 100644 server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java create mode 100644 server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java new file mode 100644 index 0000000000000..fdc96892d4b27 --- /dev/null +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class ReindexDataStreamTransportActionIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(DataStreamsPlugin.class); + } + + public void testNonExistentDataStream() { + String nonExistentDataStreamName = randomAlphaOfLength(50); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(nonExistentDataStreamName); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(new ActionType(ReindexDataStreamAction.NAME), reindexDataStreamRequest) + .actionGet() + ); + } + + public void testAlreadyUpToDateDataStream() throws Exception { + String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(dataStreamName); + createDataStream(dataStreamName); + ReindexDataStreamResponse response = client().execute( + new ActionType(ReindexDataStreamAction.NAME), + reindexDataStreamRequest + ).actionGet(); + String persistentTaskId = response.getTaskId(); + assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName)); + AtomicReference runningTask = new AtomicReference<>(); + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + TaskManager taskManager = transportService.getTaskManager(); + Map tasksMap = 
taskManager.getCancellableTasks(); + Optional> optionalTask = taskManager.getCancellableTasks() + .entrySet() + .stream() + .filter(entry -> entry.getValue().getType().equals("persistent")) + .filter( + entry -> entry.getValue() instanceof ReindexDataStreamTask + && persistentTaskId.equals((((ReindexDataStreamTask) entry.getValue()).getPersistentTaskId())) + ) + .findAny(); + optionalTask.ifPresent( + longCancellableTaskEntry -> runningTask.compareAndSet(null, (ReindexDataStreamTask) longCancellableTaskEntry.getValue()) + ); + } + ReindexDataStreamTask task = runningTask.get(); + assertNotNull(task); + assertThat(task.getStatus().complete(), equalTo(true)); + assertNull(task.getStatus().exception()); + assertThat(task.getStatus().pending(), equalTo(0)); + assertThat(task.getStatus().inProgress(), equalTo(0)); + assertThat(task.getStatus().errors().size(), equalTo(0)); + } + + private void createDataStream(String dataStreamName) { + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request("my-template"); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .template(Template.builder().build()) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = safeGet( + client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest) + ); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + dataStreamName + ); + final AcknowledgedResponse createDataStreamResponse = safeGet( + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + ); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + } + + private void indexDocs(String dataStreamName) { + int docs = randomIntBetween(5, 10); + CountDownLatch countDownLatch = new CountDownLatch(docs); + for (int i = 0; i < docs; i++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = "{ \"@timestamp\": \"2099-05-06T16:21:15.000Z\", \"message\": \"something cool happened\" }"; + indexRequest.source(doc, XContentType.JSON); + client().index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(DocWriteResponse docWriteResponse) { + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail("Indexing request should have succeeded eventually, failed with " + e.getMessage()); + } + }); + } + safeAwait(countDownLatch); + } + +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cb7445705537a..2f3b63d27ca35 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -19,19 +19,23 @@ import 
org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.action.CreateDataStreamTransportAction; @@ -40,6 +44,7 @@ import org.elasticsearch.datastreams.action.MigrateToDataStreamTransportAction; import org.elasticsearch.datastreams.action.ModifyDataStreamsTransportAction; import org.elasticsearch.datastreams.action.PromoteDataStreamTransportAction; +import org.elasticsearch.datastreams.action.ReindexDataStreamTransportAction; import org.elasticsearch.datastreams.action.TransportGetDataStreamsAction; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; @@ -73,14 +78,27 @@ import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction; import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction; import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskExecutor; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskState; +import org.elasticsearch.datastreams.task.ReindexDataStreamStatus; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.time.Clock; @@ -93,7 +111,7 @@ import static 
org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; -public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin { +public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin, PersistentTaskPlugin { public static final Setting TIME_SERIES_POLL_INTERVAL = Setting.timeSetting( "time_series.poll_interval", @@ -244,6 +262,7 @@ public Collection createComponents(PluginServices services) { actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); } + actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); return actions; } @@ -302,4 +321,48 @@ public void close() throws IOException { public Collection getHealthIndicatorServices() { return List.of(dataStreamLifecycleHealthIndicatorService.get()); } + + @Override + public List getNamedXContent() { + return List.of( + new NamedXContentRegistry.Entry( + PersistentTaskState.class, + new ParseField(ReindexDataStreamPersistentTaskState.NAME), + ReindexDataStreamPersistentTaskState::fromXContent + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new ParseField(ReindexDataStreamTaskParams.NAME), + ReindexDataStreamTaskParams::fromXContent + ) + ); + } + + @Override + public List getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry( + PersistentTaskState.class, + ReindexDataStreamPersistentTaskState.NAME, + ReindexDataStreamPersistentTaskState::new + ), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + ReindexDataStreamTaskParams.NAME, + ReindexDataStreamTaskParams::new + ), + new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) + ); + } + + @Override + public List> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { + return List.of(new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool)); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java new file mode 100644 index 0000000000000..0a86985c6c7b2 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.datastreams.action;
+
+import org.elasticsearch.ResourceAlreadyExistsException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.datastreams.ReindexDataStreamAction;
+import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest;
+import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.cluster.metadata.DataStream;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.datastreams.task.ReindexDataStreamTask;
+import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams;
+import org.elasticsearch.injection.guice.Inject;
+import org.elasticsearch.persistent.PersistentTasksService;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+/*
+ * This transport action creates a new persistent task for reindexing the source data stream given in the request. On successful creation
+ * of the persistent task, it responds with the persistent task id so that the user can monitor the persistent task.
+ */
+public class ReindexDataStreamTransportAction extends HandledTransportAction<ReindexDataStreamRequest, ReindexDataStreamResponse> {
+    private final PersistentTasksService persistentTasksService;
+    private final TransportService transportService;
+    private final ClusterService clusterService;
+
+    @Inject
+    public ReindexDataStreamTransportAction(
+        TransportService transportService,
+        ActionFilters actionFilters,
+        PersistentTasksService persistentTasksService,
+        ClusterService clusterService
+    ) {
+        super(
+            ReindexDataStreamAction.NAME,
+            true,
+            transportService,
+            actionFilters,
+            ReindexDataStreamRequest::new,
+            transportService.getThreadPool().executor(ThreadPool.Names.GENERIC)
+        );
+        this.transportService = transportService;
+        this.persistentTasksService = persistentTasksService;
+        this.clusterService = clusterService;
+    }
+
+    @Override
+    protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener<ReindexDataStreamResponse> listener) {
+        String sourceDataStreamName = request.getSourceDataStream();
+        Metadata metadata = clusterService.state().metadata();
+        DataStream dataStream = metadata.dataStreams().get(sourceDataStreamName);
+        if (dataStream == null) {
+            listener.onFailure(new ResourceNotFoundException("Data stream named [{}] does not exist", sourceDataStreamName));
+            return;
+        }
+        int totalIndices = dataStream.getIndices().size();
+        int totalIndicesToBeUpgraded = (int) dataStream.getIndices()
+            .stream()
+            .filter(index -> metadata.index(index).getCreationVersion().isLegacyIndexVersion())
+            .count();
+        ReindexDataStreamTaskParams params = new ReindexDataStreamTaskParams(
+            sourceDataStreamName,
+            transportService.getThreadPool().absoluteTimeInMillis(),
+            totalIndices,
+            totalIndicesToBeUpgraded
+        );
+        String persistentTaskId = getPersistentTaskId(sourceDataStreamName);
+        persistentTasksService.sendStartRequest(
+            persistentTaskId,
+            ReindexDataStreamTask.TASK_NAME,
+            params,
+            null,
+            ActionListener.wrap(startedTask -> listener.onResponse(new ReindexDataStreamResponse(persistentTaskId)), listener::onFailure)
+        );
+    }
+
+    private String getPersistentTaskId(String dataStreamName) throws ResourceAlreadyExistsException {
+        return "reindex-data-stream-" + dataStreamName;
+    }
+}
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java
new file mode 100644
index 0000000000000..f10d2e7b356fb
--- /dev/null
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.datastreams.GetDataStreamAction;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.persistent.AllocatedPersistentTask;
+import org.elasticsearch.persistent.PersistentTaskState;
+import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
+import org.elasticsearch.persistent.PersistentTasksExecutor;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.List;
+import java.util.Map;
+
+public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExecutor<ReindexDataStreamTaskParams> {
+    private static final TimeValue TASK_KEEP_ALIVE_TIME = TimeValue.timeValueDays(1);
+    private final Client client;
+    private final ClusterService clusterService;
+    private final ThreadPool threadPool;
+
+    public ReindexDataStreamPersistentTaskExecutor(Client client, ClusterService clusterService, String taskName, ThreadPool threadPool) {
+        super(taskName, threadPool.generic());
+        this.client = client;
+        this.clusterService = clusterService;
+        this.threadPool = threadPool;
+    }
+
+    @Override
+    protected ReindexDataStreamTask createTask(
+        long id,
+        String type,
+        String action,
+        TaskId parentTaskId,
+        PersistentTasksCustomMetadata.PersistentTask<ReindexDataStreamTaskParams> taskInProgress,
+        Map<String, String> headers
+    ) {
+        ReindexDataStreamTaskParams params = taskInProgress.getParams();
+        return new ReindexDataStreamTask(
+            params.startTime(),
+            params.totalIndices(),
+            params.totalIndicesToBeUpgraded(),
+            threadPool,
+            id,
+            type,
+            action,
+            "id=" + taskInProgress.getId(),
+            parentTaskId,
+            headers
+        );
+    }
+
+    @Override
+    protected void nodeOperation(AllocatedPersistentTask task, ReindexDataStreamTaskParams params, PersistentTaskState state) {
+        String sourceDataStream = params.getSourceDataStream();
+        GetDataStreamAction.Request request = new GetDataStreamAction.Request(TimeValue.MAX_VALUE, new String[] { sourceDataStream });
+        assert task instanceof ReindexDataStreamTask;
+        final ReindexDataStreamTask reindexDataStreamTask = (ReindexDataStreamTask) task;
+        client.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> {
+            List<GetDataStreamAction.Response.DataStreamInfo> dataStreamInfos = response.getDataStreams();
+            if (dataStreamInfos.size() == 1) {
+                List<Index> indices = dataStreamInfos.getFirst().getDataStream().getIndices();
+                List<Index> indicesToBeReindexed = indices.stream()
+                    .filter(index -> clusterService.state().getMetadata().index(index).getCreationVersion().isLegacyIndexVersion())
+                    .toList();
+                reindexDataStreamTask.setPendingIndices(indicesToBeReindexed.stream().map(Index::getName).toList());
+                for (Index index : indicesToBeReindexed) {
+                    // TODO This is just a placeholder. This is where the real data stream reindex logic will go
+                }
+
+                completeSuccessfulPersistentTask(reindexDataStreamTask);
+            } else {
+                completeFailedPersistentTask(reindexDataStreamTask, new ElasticsearchException("data stream does not exist"));
+            }
+        }, reindexDataStreamTask::markAsFailed));
+    }
+
+    private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) {
+        persistentTask.reindexSucceeded();
+        threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic());
+    }
+
+    private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) {
+        persistentTask.reindexFailed(e);
+        threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic());
+    }
+
+    private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) {
+        PersistentTasksCustomMetadata persistentTasksCustomMetadata = clusterService.state()
+            .getMetadata()
+            .custom(PersistentTasksCustomMetadata.TYPE);
+        PersistentTasksCustomMetadata.PersistentTask<?> persistentTask = persistentTasksCustomMetadata.getTask(
+            reindexDataStreamTask.getPersistentTaskId()
+        );
+        PersistentTaskState state = persistentTask.getState();
+        final long completionTime;
+        if (state == null) {
+            completionTime = threadPool.absoluteTimeInMillis();
+            reindexDataStreamTask.updatePersistentTaskState(
+                new ReindexDataStreamPersistentTaskState(completionTime),
+                ActionListener.noop()
+            );
+        } else {
+            completionTime = ((ReindexDataStreamPersistentTaskState) state).completionTime();
+        }
+        return TimeValue.timeValueMillis(TASK_KEEP_ALIVE_TIME.millis() - (threadPool.absoluteTimeInMillis() - completionTime));
+    }
+}
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java
new file mode 100644
index 0000000000000..d6f32a3d34a7a
--- /dev/null
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.persistent.PersistentTaskState;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+public record ReindexDataStreamPersistentTaskState(long completionTime) implements Task.Status, PersistentTaskState {
+    public static final String NAME = ReindexDataStreamTask.TASK_NAME;
+    private static final String COMPLETION_TIME_FIELD = "completion_time";
+    private static final ConstructingObjectParser<ReindexDataStreamPersistentTaskState, Void> PARSER = new ConstructingObjectParser<>(
+        NAME,
+        true,
+        args -> new ReindexDataStreamPersistentTaskState((long) args[0])
+    );
+    static {
+        PARSER.declareLong(constructorArg(), new ParseField(COMPLETION_TIME_FIELD));
+    }
+
+    public ReindexDataStreamPersistentTaskState(StreamInput in) throws IOException {
+        this(in.readLong());
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeLong(completionTime);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(COMPLETION_TIME_FIELD, completionTime);
+        builder.endObject();
+        return builder;
+    }
+
+    public static ReindexDataStreamPersistentTaskState fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+
+}
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java
new file mode 100644
index 0000000000000..10dfded853a13
--- /dev/null
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+public record ReindexDataStreamStatus(
+    long persistentTaskStartTime,
+    int totalIndices,
+    int totalIndicesToBeUpgraded,
+    boolean complete,
+    Exception exception,
+    int inProgress,
+    int pending,
+    List<Tuple<String, Exception>> errors
+) implements Task.Status {
+    public ReindexDataStreamStatus {
+        Objects.requireNonNull(errors);
+    }
+
+    public static final String NAME = "ReindexDataStreamStatus";
+
+    public ReindexDataStreamStatus(StreamInput in) throws IOException {
+        this(
+            in.readLong(),
+            in.readInt(),
+            in.readInt(),
+            in.readBoolean(),
+            in.readException(),
+            in.readInt(),
+            in.readInt(),
+            in.readCollectionAsList(in1 -> Tuple.tuple(in1.readString(), in1.readException()))
+        );
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeLong(persistentTaskStartTime);
+        out.writeInt(totalIndices);
+        out.writeInt(totalIndicesToBeUpgraded);
+        out.writeBoolean(complete);
+        out.writeException(exception);
+        out.writeInt(inProgress);
+        out.writeInt(pending);
+        out.writeCollection(errors, (out1, tuple) -> {
+            out1.writeString(tuple.v1());
+            out1.writeException(tuple.v2());
+        });
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("start_time", persistentTaskStartTime);
+        builder.field("complete", complete);
+        builder.field("total_indices", totalIndices);
+        builder.field("total_indices_requiring_upgrade", totalIndicesToBeUpgraded);
+        builder.field("successes", totalIndicesToBeUpgraded - (inProgress + pending + errors.size()));
+        builder.field("in_progress", inProgress);
+        builder.field("pending", pending);
+        builder.startArray("errors");
+        for (Tuple<String, Exception> error : errors) {
+            builder.startObject();
+            builder.field("index", error.v1());
+            builder.field("message", error.v2().getMessage());
+            builder.endObject();
+        }
+        builder.endArray();
+        if (exception != null) {
+            builder.field("exception", exception.getMessage());
+        }
+        builder.endObject();
+        return builder;
+    }
+}
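Rendered through the toXContent method above, a status serializes to JSON of
roughly this shape (the values mirror the unit test later in this series and
are illustrative only):

    {
      "start_time": 1234,
      "complete": true,
      "total_indices": 200,
      "total_indices_requiring_upgrade": 100,
      "successes": 78,
      "in_progress": 12,
      "pending": 8,
      "errors": [
        { "index": "index7", "message": "index7 failed" },
        { "index": "index8", "message": "index8 failed" }
      ],
      "exception": "the whole task failed"
    }

diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java
new file mode 100644
index 0000000000000..2ae244679659f
--- /dev/null
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".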
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.persistent.AllocatedPersistentTask;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class ReindexDataStreamTask extends AllocatedPersistentTask {
+    public static final String TASK_NAME = "reindex-data-stream";
+    private final long persistentTaskStartTime;
+    private final int totalIndices;
+    private final int totalIndicesToBeUpgraded;
+    private final ThreadPool threadPool;
+    private boolean complete = false;
+    private Exception exception;
+    private List<String> inProgress = new ArrayList<>();
+    private List<String> pending = List.of();
+    private List<Tuple<String, Exception>> errors = new ArrayList<>();
+
+    public ReindexDataStreamTask(
+        long persistentTaskStartTime,
+        int totalIndices,
+        int totalIndicesToBeUpgraded,
+        ThreadPool threadPool,
+        long id,
+        String type,
+        String action,
+        String description,
+        TaskId parentTask,
+        Map<String, String> headers
+    ) {
+        super(id, type, action, description, parentTask, headers);
+        this.persistentTaskStartTime = persistentTaskStartTime;
+        this.totalIndices = totalIndices;
+        this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded;
+        this.threadPool = threadPool;
+    }
+
+    @Override
+    public ReindexDataStreamStatus getStatus() {
+        return new ReindexDataStreamStatus(
+            persistentTaskStartTime,
+            totalIndices,
+            totalIndicesToBeUpgraded,
+            complete,
+            exception,
+            inProgress.size(),
+            pending.size(),
+            errors
+        );
+    }
+
+    public void reindexSucceeded() {
+        this.complete = true;
+    }
+
+    public void reindexFailed(Exception e) {
+        this.complete = true;
+        this.exception = e;
+    }
+
+    public void setInProgressIndices(List<String> inProgressIndices) {
+        this.inProgress = inProgressIndices;
+    }
+
+    public void setPendingIndices(List<String> pendingIndices) {
+        this.pending = pendingIndices;
+    }
+
+    public void addErrorIndex(String index, Exception error) {
+        this.errors.add(Tuple.tuple(index, error));
+    }
+}
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java
new file mode 100644
index 0000000000000..5efbc6b672216
--- /dev/null
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.persistent.PersistentTaskParams;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+public record ReindexDataStreamTaskParams(String sourceDataStream, long startTime, int totalIndices, int totalIndicesToBeUpgraded)
+    implements
+        PersistentTaskParams {
+
+    public static final String NAME = ReindexDataStreamTask.TASK_NAME;
+    private static final String SOURCE_DATA_STREAM_FIELD = "source_data_stream";
+    private static final String START_TIME_FIELD = "start_time";
+    private static final String TOTAL_INDICES_FIELD = "total_indices";
+    private static final String TOTAL_INDICES_TO_BE_UPGRADED_FIELD = "total_indices_to_be_upgraded";
+    private static final ConstructingObjectParser<ReindexDataStreamTaskParams, Void> PARSER = new ConstructingObjectParser<>(
+        NAME,
+        true,
+        args -> new ReindexDataStreamTaskParams((String) args[0], (long) args[1], (int) args[2], (int) args[3])
+    );
+    static {
+        PARSER.declareString(constructorArg(), new ParseField(SOURCE_DATA_STREAM_FIELD));
+        PARSER.declareLong(constructorArg(), new ParseField(START_TIME_FIELD));
+        PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_FIELD));
+        PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_TO_BE_UPGRADED_FIELD));
+    }
+
+    public ReindexDataStreamTaskParams(StreamInput in) throws IOException {
+        this(in.readString(), in.readLong(), in.readInt(), in.readInt());
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public TransportVersion getMinimalSupportedVersion() {
+        return TransportVersions.REINDEX_DATA_STREAMS;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(sourceDataStream);
+        out.writeLong(startTime);
+        out.writeInt(totalIndices);
+        out.writeInt(totalIndicesToBeUpgraded);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return builder.startObject()
+            .field(SOURCE_DATA_STREAM_FIELD, sourceDataStream)
+            .field(START_TIME_FIELD, startTime)
+            .field(TOTAL_INDICES_FIELD, totalIndices)
+            .field(TOTAL_INDICES_TO_BE_UPGRADED_FIELD, totalIndicesToBeUpgraded)
+            .endObject();
+    }
+
+    public String getSourceDataStream() {
+        return sourceDataStream;
+    }
+
+    public static ReindexDataStreamTaskParams fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+}
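For reference, the XContent form produced by the toXContent method above looks
like the following (field values are illustrative, not taken from the patch):

    {
      "source_data_stream": "my-data-stream",
      "start_time": 1732100000000,
      "total_indices": 5,
      "total_indices_to_be_upgraded": 3
    }

diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java
new file mode 100644
index 0000000000000..be11bff131909
--- /dev/null
+++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License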
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class ReindexDataStreamPersistentTaskStateTests extends AbstractXContentSerializingTestCase { + @Override + protected ReindexDataStreamPersistentTaskState doParseInstance(XContentParser parser) throws IOException { + return ReindexDataStreamPersistentTaskState.fromXContent(parser); + } + + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamPersistentTaskState::new; + } + + @Override + protected ReindexDataStreamPersistentTaskState createTestInstance() { + return new ReindexDataStreamPersistentTaskState(randomNegativeLong()); + } + + @Override + protected ReindexDataStreamPersistentTaskState mutateInstance(ReindexDataStreamPersistentTaskState instance) throws IOException { + return new ReindexDataStreamPersistentTaskState(instance.completionTime() + 1); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java new file mode 100644 index 0000000000000..8f0fabc2ce7ee --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.json.JsonXContent;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Map.entry;
+import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ReindexDataStreamStatusTests extends AbstractWireSerializingTestCase<ReindexDataStreamStatus> {
+
+    @Override
+    protected Writeable.Reader<ReindexDataStreamStatus> instanceReader() {
+        return ReindexDataStreamStatus::new;
+    }
+
+    @Override
+    protected ReindexDataStreamStatus createTestInstance() {
+        return new ReindexDataStreamStatus(
+            randomLong(),
+            randomNegativeInt(),
+            randomNegativeInt(),
+            randomBoolean(),
+            nullableTestException(),
+            randomNegativeInt(),
+            randomNegativeInt(),
+            randomErrorList()
+        );
+    }
+
+    private Exception nullableTestException() {
+        if (randomBoolean()) {
+            return testException();
+        }
+        return null;
+    }
+
+    private Exception testException() {
+        /*
+         * Unfortunately ElasticsearchException doesn't have an equals and just falls back to Object::equals. So we can't test for equality
+         * when we're using an exception. So always just use null.
+         */
+        return null;
+    }
+
+    private List<String> randomList() {
+        return randomList(0);
+    }
+
+    private List<String> randomList(int minSize) {
+        return randomList(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50));
+    }
+
+    private List<Tuple<String, Exception>> randomErrorList() {
+        return randomErrorList(0);
+    }
+
+    private List<Tuple<String, Exception>> randomErrorList(int minSize) {
+        return randomList(minSize, Math.max(minSize, 100), () -> Tuple.tuple(randomAlphaOfLength(30), testException()));
+    }
+
+    @Override
+    protected ReindexDataStreamStatus mutateInstance(ReindexDataStreamStatus instance) throws IOException {
+        long startTime = instance.persistentTaskStartTime();
+        int totalIndices = instance.totalIndices();
+        int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded();
+        boolean complete = instance.complete();
+        Exception exception = instance.exception();
+        int inProgress = instance.inProgress();
+        int pending = instance.pending();
+        List<Tuple<String, Exception>> errors = instance.errors();
+        switch (randomIntBetween(0, 6)) {
+            case 0 -> startTime = randomLong();
+            case 1 -> totalIndices = totalIndices + 1;
+            case 2 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1;
+            case 3 -> complete = complete == false;
+            case 4 -> inProgress = inProgress + 1;
+            case 5 -> pending = pending + 1;
+            case 6 -> errors = randomErrorList(errors.size() + 1);
+            default -> throw new UnsupportedOperationException();
+        }
+        return new ReindexDataStreamStatus(
+            startTime,
+            totalIndices,
+            totalIndicesToBeUpgraded,
+            complete,
+            exception,
+            inProgress,
+            pending,
+            errors
+        );
+    }
+
+    public void testToXContent() throws IOException {
+        ReindexDataStreamStatus status = new ReindexDataStreamStatus(
+            1234L,
+            200,
+            100,
+            true,
+            new ElasticsearchException("the whole task failed"),
+            12,
+            8,
+            List.of(
+                Tuple.tuple("index7", new ElasticsearchException("index7 failed")),
+                Tuple.tuple("index8", new ElasticsearchException("index8 failed"))
+            )
+        );
+        try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) {
+            builder.humanReadable(true);
+            status.toXContent(builder, EMPTY_PARAMS);
+            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+                Map<String, Object> parserMap = parser.map();
+                assertThat(
+                    parserMap,
+                    equalTo(
+                        Map.ofEntries(
+                            entry("start_time", 1234),
+                            entry("total_indices", 200),
+                            entry("total_indices_requiring_upgrade", 100),
+                            entry("complete", true),
+                            entry("exception", "the whole task failed"),
+                            entry("successes", 78),
+                            entry("in_progress", 12),
+                            entry("pending", 8),
+                            entry(
+                                "errors",
+                                List.of(
+                                    Map.of("index", "index7", "message", "index7 failed"),
+                                    Map.of("index", "index8", "message", "index8 failed")
+                                )
+                            )
+                        )
+                    )
+                );
+            }
+        }
+    }
+}
diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java
new file mode 100644
index 0000000000000..55098bf4a68d5
--- /dev/null
+++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractXContentSerializingTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.json.JsonXContent;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ReindexDataStreamTaskParamsTests extends AbstractXContentSerializingTestCase<ReindexDataStreamTaskParams> {
+
+    @Override
+    protected Writeable.Reader<ReindexDataStreamTaskParams> instanceReader() {
+        return ReindexDataStreamTaskParams::new;
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams createTestInstance() {
+        return new ReindexDataStreamTaskParams(randomAlphaOfLength(50), randomLong(), randomNonNegativeInt(), randomNonNegativeInt());
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams mutateInstance(ReindexDataStreamTaskParams instance) {
+        String sourceDataStream = instance.sourceDataStream();
+        long startTime = instance.startTime();
+        int totalIndices = instance.totalIndices();
+        int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded();
+        switch (randomIntBetween(0, 3)) {
+            case 0 -> sourceDataStream = randomAlphaOfLength(50);
+            case 1 -> startTime = randomLong();
+            case 2 -> totalIndices = totalIndices + 1;
+            case 3 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1;
+            default -> throw new UnsupportedOperationException();
+        }
+        return new ReindexDataStreamTaskParams(sourceDataStream, startTime, totalIndices, totalIndicesToBeUpgraded);
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams doParseInstance(XContentParser parser) {
+        return ReindexDataStreamTaskParams.fromXContent(parser);
+    }
+
+    public void testToXContent() throws IOException {
+        ReindexDataStreamTaskParams params = createTestInstance();
+        try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) {
+            builder.humanReadable(true);
+            params.toXContent(builder, EMPTY_PARAMS);
+            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+                Map<String, Object> parserMap = parser.map();
+                assertThat(parserMap.get("source_data_stream"), equalTo(params.sourceDataStream()));
+                assertThat(((Number) parserMap.get("start_time")).longValue(), equalTo(params.startTime()));
+            }
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 95fffb1fe8224..688d2aaf905a6 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -205,6 +205,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0);
     public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0);
     public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0);
+    public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0);
 
     /*
      * STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java
new file mode 100644
index 0000000000000..814c512c43bec
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.action.datastreams;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.ActionType;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class ReindexDataStreamAction extends ActionType<ReindexDataStreamAction.ReindexDataStreamResponse> {
+
+    public static final ReindexDataStreamAction INSTANCE = new ReindexDataStreamAction();
+    public static final String NAME = "indices:admin/data_stream/reindex";
+
+    public ReindexDataStreamAction() {
+        super(NAME);
+    }
+
+    public static class ReindexDataStreamResponse extends ActionResponse implements ToXContentObject {
+        private final String taskId;
+
+        public ReindexDataStreamResponse(String taskId) {
+            super();
+            this.taskId = taskId;
+        }
+
+        public ReindexDataStreamResponse(StreamInput in) throws IOException {
+            super(in);
+            this.taskId = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(taskId);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field("task", getTaskId());
+            builder.endObject();
+            return builder;
+        }
+
+        public String getTaskId() {
+            return taskId;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hashCode(taskId);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return other instanceof ReindexDataStreamResponse && taskId.equals(((ReindexDataStreamResponse) other).taskId);
+        }
+
+    }
+
+    public static class ReindexDataStreamRequest extends ActionRequest {
+        private final String sourceDataStream;
+
+        public ReindexDataStreamRequest(String sourceDataStream) {
+            super();
+            this.sourceDataStream = sourceDataStream;
+        }
+
+        public ReindexDataStreamRequest(StreamInput in) throws IOException {
+            super(in);
+            this.sourceDataStream = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeString(sourceDataStream);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public boolean getShouldStoreResult() {
+            return true; // do not wait_for_completion
+        }
+
+        public String getSourceDataStream() {
+            return sourceDataStream;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hashCode(sourceDataStream);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return other instanceof ReindexDataStreamRequest
+                && sourceDataStream.equals(((ReindexDataStreamRequest) other).sourceDataStream);
+        }
+    }
+}
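A sketch of how node-level code might invoke the new action; the surrounding
method, client, and logger are assumptions for illustration, not part of this
patch, but the call pattern matches how actions are executed elsewhere in this
series:

    // Starts the reindex persistent task for "my-data-stream". Because the request
    // overrides getShouldStoreResult() to return true, the response (carrying the
    // persistent task id) is stored instead of forcing the caller to wait.
    void startReindex(Client client) {
        client.execute(
            ReindexDataStreamAction.INSTANCE,
            new ReindexDataStreamAction.ReindexDataStreamRequest("my-data-stream"),
            ActionListener.wrap(
                response -> logger.info("started reindex task [{}]", response.getTaskId()),
                e -> logger.error("failed to start reindex task", e)
            )
        );
    }

diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java
new file mode 100644
index 0000000000000..fe839c28aab88
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License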
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamResponseTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamResponse::new; + } + + @Override + protected ReindexDataStreamResponse createTestInstance() { + return new ReindexDataStreamResponse(randomAlphaOfLength(40)); + } + + @Override + protected ReindexDataStreamResponse mutateInstance(ReindexDataStreamResponse instance) { + return createTestInstance(); + } + + public void testToXContent() throws IOException { + ReindexDataStreamResponse response = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), equalTo(Map.of("task", response.getTaskId()))); + } + } + } +} From 4e04a7bddcd33f798c54c62463d0d8a082a4ca81 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Wed, 20 Nov 2024 19:08:56 -0500 Subject: [PATCH 119/386] [ML] Increase Model Download Timeout In YAML Tests (#117066) Increase the timeout used in inference YAML tests for model download. Fixes #116899. 
--- muted-tests.yml | 5 ----- .../org/elasticsearch/xpack/inference/InferenceRestIT.java | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 710cdea8f1564..49f7e69bbf06e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -223,14 +223,9 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} - issue: https://github.com/elastic/elasticsearch/issues/116542 - class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests method: testTermQuery issue: https://github.com/elastic/elasticsearch/issues/116879 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - issue: https://github.com/elastic/elasticsearch/issues/116899 - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index fe406722ae1e2..8d8ad94d608d7 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -36,7 +36,7 @@ protected Settings restClientSettings() { var baseSettings = super.restClientSettings(); return Settings.builder() .put(baseSettings) - .put(CLIENT_SOCKET_TIMEOUT, "120s") // Long timeout for model download + .put(CLIENT_SOCKET_TIMEOUT, "300s") // Long timeout for model download .build(); } From 6e5038009f8ee6c58dc54ff686da05c688ae499f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:31:34 +1100 Subject: [PATCH 120/386] Mute org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT testEnterpriseDownloaderTask #115163 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 49f7e69bbf06e..fa467896a7b34 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -244,6 +244,9 @@ tests: - class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT method: testEsqlSource issue: https://github.com/elastic/elasticsearch/issues/117212 +- class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT + method: testEnterpriseDownloaderTask + issue: https://github.com/elastic/elasticsearch/issues/115163 # Examples: # From 654c37c188710f83779dcb0a87295d2cb5658629 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 21 Nov 2024 07:34:32 +0100 Subject: [PATCH 121/386] [Entitlements] Consider only system modules in the boot layer (#117017) --- .../api/ElasticsearchEntitlementChecker.java | 37 +++++++++++++++---- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 6d5dbd4098aa9..cff3117d032a4 100644 --- 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
@@ -13,7 +13,11 @@
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
 
+import java.lang.module.ModuleFinder;
+import java.lang.module.ModuleReference;
 import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
 
 /**
  * Implementation of the {@link EntitlementChecker} interface, providing additional
@@ -23,12 +27,35 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
 
     private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class);
 
+    private static final Set<Module> systemModules = findSystemModules();
+
+    private static Set<Module> findSystemModules() {
+        var systemModulesDescriptors = ModuleFinder.ofSystem()
+            .findAll()
+            .stream()
+            .map(ModuleReference::descriptor)
+            .collect(Collectors.toUnmodifiableSet());
+
+        return ModuleLayer.boot()
+            .modules()
+            .stream()
+            .filter(m -> systemModulesDescriptors.contains(m.getDescriptor()))
+            .collect(Collectors.toUnmodifiableSet());
+    }
+
     @Override
     public void checkSystemExit(Class<?> callerClass, int status) {
         var requestingModule = requestingModule(callerClass);
         if (isTriviallyAllowed(requestingModule)) {
             return;
         }
+
+        // TODO: this will be checked using policies
+        if (requestingModule.isNamed() && requestingModule.getName().equals("org.elasticsearch.server")) {
+            logger.debug("Allowed: caller in {} is entitled to exit the JVM", requestingModule.getName());
+            return;
+        }
+
         // Hard-forbidden until we develop the permission granting scheme
         throw new NotEntitledException("Missing entitlement for " + requestingModule);
     }
@@ -36,7 +63,7 @@ public void checkSystemExit(Class<?> callerClass, int status) {
     private static Module requestingModule(Class<?> callerClass) {
         if (callerClass != null) {
             Module callerModule = callerClass.getModule();
-            if (callerModule.getLayer() != ModuleLayer.boot()) {
+            if (systemModules.contains(callerModule) == false) {
                 // fast path
                 return callerModule;
             }
@@ -50,7 +77,7 @@
             .walk(
                 s -> s.skip(framesToSkip)
                     .map(f -> f.getDeclaringClass().getModule())
-                    .filter(m -> m.getLayer() != ModuleLayer.boot())
+                    .filter(m -> systemModules.contains(m) == false)
                     .findFirst()
             );
         return module.orElse(null);
@@ -58,11 +85,7 @@
     private static boolean isTriviallyAllowed(Module requestingModule) {
         if (requestingModule == null) {
-            logger.debug("Trivially allowed: Entire call stack is in the boot module layer");
-            return true;
-        }
-        if (requestingModule == System.class.getModule()) {
-            logger.debug("Trivially allowed: Caller is in {}", System.class.getModule().getName());
+            logger.debug("Trivially allowed: entire call stack is composed of classes in system modules");
             return true;
         }
         logger.trace("Not trivially allowed");
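To make the new classification concrete: a module counts as "system" when its
descriptor is found by ModuleFinder.ofSystem(), i.e. it ships with the JDK
runtime image. A small standalone sketch (not part of the patch) that prints
whether java.base is classified that way:

    import java.lang.module.ModuleDescriptor;
    import java.lang.module.ModuleFinder;
    import java.lang.module.ModuleReference;
    import java.util.Set;
    import java.util.stream.Collectors;

    public class SystemModulesDemo {
        public static void main(String[] args) {
            // Names of every module provided by the JDK runtime image itself.
            Set<String> systemModuleNames = ModuleFinder.ofSystem()
                .findAll()
                .stream()
                .map(ModuleReference::descriptor)
                .map(ModuleDescriptor::name)
                .collect(Collectors.toUnmodifiableSet());

            // Frames declared in these modules are skipped when attributing a
            // sensitive call to a requesting module.
            System.out.println(systemModuleNames.contains("java.base")); // true
        }
    }

From adcc5bed1eee126fa4caa7b4a6fa30851de89fdb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?=
Date: Thu, 21 Nov 2024 07:40:55 +0100
Subject: [PATCH 122/386] [Entitlements] Implement entry point definitions via checker function signature (#116754)

---
 .../impl/InstrumentationServiceImpl.java      | 106 ++++++-
 .../impl/InstrumenterImpl.java                |  26 +-
 .../impl/InstrumentationServiceImplTests.java | 262 ++++++++++++++++++
 .../impl/InstrumenterTests.java               | 215 ++++++++++++--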
 .../bridge/EntitlementChecker.java            |   2 +-
 .../EntitlementInitialization.java            |  27 +-
 .../instrumentation/CheckerMethod.java        |  23 ++
 .../InstrumentationService.java               |   5 +-
 .../instrumentation/MethodKey.java            |   7 +-
 .../api/ElasticsearchEntitlementChecker.java  |   2 +-
 10 files changed, 615 insertions(+), 60 deletions(-)
 create mode 100644 libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java

diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
index f5fe8d41c2243..a3bbb611f3e68 100644
--- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
+++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
@@ -9,19 +9,29 @@
 
 package org.elasticsearch.entitlement.instrumentation.impl;
 
+import org.elasticsearch.entitlement.instrumentation.CheckerMethod;
 import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
 import org.elasticsearch.entitlement.instrumentation.Instrumenter;
 import org.elasticsearch.entitlement.instrumentation.MethodKey;
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
 import org.objectweb.asm.Type;
 
+import java.io.IOException;
 import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.stream.Stream;
 
 public class InstrumentationServiceImpl implements InstrumentationService {
+
     @Override
-    public Instrumenter newInstrumenter(String classNameSuffix, Map<MethodKey, Method> instrumentationMethods) {
+    public Instrumenter newInstrumenter(String classNameSuffix, Map<MethodKey, CheckerMethod> instrumentationMethods) {
         return new InstrumenterImpl(classNameSuffix, instrumentationMethods);
     }
 
@@ -33,9 +43,97 @@ public MethodKey methodKeyForTarget(Method targetMethod) {
         return new MethodKey(
             Type.getInternalName(targetMethod.getDeclaringClass()),
             targetMethod.getName(),
-            Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList(),
-            Modifier.isStatic(targetMethod.getModifiers())
+            Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList()
         );
     }
 
+    @Override
+    public Map<MethodKey, CheckerMethod> lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException,
+        IOException {
+        var methodsToInstrument = new HashMap<MethodKey, CheckerMethod>();
+        var checkerClass = Class.forName(entitlementCheckerClassName);
+        var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass);
+        ClassReader reader = new ClassReader(classFileInfo.bytecodes());
+        ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) {
+            @Override
+            public MethodVisitor visitMethod(
+                int access,
+                String checkerMethodName,
+                String checkerMethodDescriptor,
+                String signature,
+                String[] exceptions
+            ) {
+                var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions);
+
+                var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor);
+                var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes);
+
+                var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList();
+                var checkerMethod = new CheckerMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors);
+
+                methodsToInstrument.put(methodToInstrument, checkerMethod);
+
+                return mv;
+            }
+        };
+        reader.accept(visitor, 0);
+        return methodsToInstrument;
+    }
+
+    private static final Type CLASS_TYPE = Type.getType(Class.class);
+
+    static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] checkerMethodArgumentTypes) {
+        var classNameStartIndex = checkerMethodName.indexOf('$');
+        var classNameEndIndex = checkerMethodName.lastIndexOf('$');
+
+        if (classNameStartIndex == -1 || classNameStartIndex >= classNameEndIndex) {
+            throw new IllegalArgumentException(
+                String.format(
+                    Locale.ROOT,
+                    "Checker method %s has incorrect name format. "
+                        + "It should be either check$$methodName (instance) or check$package_ClassName$methodName (static)",
+                    checkerMethodName
+                )
+            );
+        }
+
+        // No "className" (check$$methodName) -> method is instance, and we'll get the class from the actual typed argument
+        final boolean targetMethodIsStatic = classNameStartIndex + 1 != classNameEndIndex;
+        final String targetMethodName = checkerMethodName.substring(classNameEndIndex + 1);
+
+        final String targetClassName;
+        final List<String> targetParameterTypes;
+        if (targetMethodIsStatic) {
+            if (checkerMethodArgumentTypes.length < 1 || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false) {
+                throw new IllegalArgumentException(
+                    String.format(
+                        Locale.ROOT,
+                        "Checker method %s has incorrect argument types. " + "It must have a first argument of Class type.",
+                        checkerMethodName
+                    )
+                );
+            }
+
+            targetClassName = checkerMethodName.substring(classNameStartIndex + 1, classNameEndIndex).replace('_', '/');
+            targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(1).map(Type::getInternalName).toList();
+        } else {
+            if (checkerMethodArgumentTypes.length < 2
+                || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false
+                || checkerMethodArgumentTypes[1].getSort() != Type.OBJECT) {
+                throw new IllegalArgumentException(
+                    String.format(
+                        Locale.ROOT,
+                        "Checker method %s has incorrect argument types. "
" + + "It must have a first argument of Class type, and a second argument of the class containing the method to " + + "instrument", + checkerMethodName + ) + ); + } + var targetClassType = checkerMethodArgumentTypes[1]; + targetClassName = targetClassType.getInternalName(); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(2).map(Type::getInternalName).toList(); + } + return new MethodKey(targetClassName, targetMethodName, targetParameterTypes); + } } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 63c9ccd80be70..53e76372b107d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.objectweb.asm.AnnotationVisitor; @@ -23,7 +24,6 @@ import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Method; import java.util.Map; import java.util.stream.Stream; @@ -40,9 +40,9 @@ public class InstrumenterImpl implements Instrumenter { * To avoid class name collisions during testing without an agent to replace classes in-place. */ private final String classNameSuffix; - private final Map instrumentationMethods; + private final Map instrumentationMethods; - public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { + public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { this.classNameSuffix = classNameSuffix; this.instrumentationMethods = instrumentationMethods; } @@ -138,12 +138,7 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str var mv = super.visitMethod(access, name, descriptor, signature, exceptions); if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; - var key = new MethodKey( - className, - name, - Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), - isStatic - ); + var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = instrumentationMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); @@ -177,7 +172,7 @@ private void addClassAnnotationIfNeeded() { class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final String instrumentedMethodDescriptor; - private final Method instrumentationMethod; + private final CheckerMethod instrumentationMethod; private boolean hasCallerSensitiveAnnotation = false; EntitlementMethodVisitor( @@ -185,7 +180,7 @@ class EntitlementMethodVisitor extends MethodVisitor { MethodVisitor methodVisitor, boolean instrumentedMethodIsStatic, String instrumentedMethodDescriptor, - Method instrumentationMethod + CheckerMethod instrumentationMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; @@ -262,9 +257,12 @@ private void forwardIncomingArguments() { private void 
         mv.visitMethodInsn(
             INVOKEINTERFACE,
-            Type.getInternalName(instrumentationMethod.getDeclaringClass()),
-            instrumentationMethod.getName(),
-            Type.getMethodDescriptor(instrumentationMethod),
+            instrumentationMethod.className(),
+            instrumentationMethod.methodName(),
+            Type.getMethodDescriptor(
+                Type.VOID_TYPE,
+                instrumentationMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new)
+            ),
             true
         );
     }
diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java
new file mode 100644
index 0000000000000..c0ff5d59d3c72
--- /dev/null
+++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java
@@ -0,0 +1,262 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation.impl;
+
+import org.elasticsearch.entitlement.instrumentation.CheckerMethod;
+import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+import org.elasticsearch.entitlement.instrumentation.MethodKey;
+import org.elasticsearch.test.ESTestCase;
+import org.objectweb.asm.Type;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.aMapWithSize;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+
+@ESTestCase.WithoutSecurityManager
+public class InstrumentationServiceImplTests extends ESTestCase {
+
+    final InstrumentationService instrumentationService = new InstrumentationServiceImpl();
+
+    static class TestTargetClass {}
+
+    interface TestChecker {
+        void check$org_example_TestTargetClass$staticMethod(Class<?> clazz, int arg0, String arg1, Object arg2);
+
+        void check$$instanceMethodNoArgs(Class<?> clazz, TestTargetClass that);
+
+        void check$$instanceMethodWithArgs(Class<?> clazz, TestTargetClass that, int x, int y);
+    }
+
+    interface TestCheckerOverloads {
+        void check$org_example_TestTargetClass$staticMethodWithOverload(Class<?> clazz, int x, int y);
+
+        void check$org_example_TestTargetClass$staticMethodWithOverload(Class<?> clazz, int x, String y);
+    }
+
+    public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException {
+        Map<MethodKey, CheckerMethod> methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName());
+
+        assertThat(methodsMap, aMapWithSize(3));
+        assertThat(
+            methodsMap,
+            hasEntry(
+                equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))),
+                equalTo(
+                    new CheckerMethod(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker",
+                        "check$org_example_TestTargetClass$staticMethod",
+                        List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;")
+                    )
+                )
+            )
+        );
+        assertThat(
+            methodsMap,
+            hasEntry(
+                equalTo(
+                    new MethodKey(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass",
+                        "instanceMethodNoArgs",
+                        List.of()
+                    )
+                ),
+                equalTo(
+                    new CheckerMethod(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker",
+                        "check$$instanceMethodNoArgs",
+                        List.of(
+                            "Ljava/lang/Class;",
+                            "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;"
+                        )
+                    )
+                )
+            )
+        );
+        assertThat(
+            methodsMap,
+            hasEntry(
+                equalTo(
+                    new MethodKey(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass",
+                        "instanceMethodWithArgs",
+                        List.of("I", "I")
+                    )
+                ),
+                equalTo(
+                    new CheckerMethod(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker",
+                        "check$$instanceMethodWithArgs",
+                        List.of(
+                            "Ljava/lang/Class;",
+                            "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;",
+                            "I",
+                            "I"
+                        )
+                    )
+                )
+            )
+        );
+    }
+
+    public void testInstrumentationTargetLookupWithOverloads() throws IOException, ClassNotFoundException {
+        Map<MethodKey, CheckerMethod> methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName());
+
+        assertThat(methodsMap, aMapWithSize(2));
+        assertThat(
+            methodsMap,
+            hasEntry(
+                equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "java/lang/String"))),
+                equalTo(
+                    new CheckerMethod(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads",
+                        "check$org_example_TestTargetClass$staticMethodWithOverload",
+                        List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;")
+                    )
+                )
+            )
+        );
+        assertThat(
+            methodsMap,
+            hasEntry(
+                equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "I"))),
+                equalTo(
+                    new CheckerMethod(
+                        "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads",
+                        "check$org_example_TestTargetClass$staticMethodWithOverload",
+                        List.of("Ljava/lang/Class;", "I", "I")
+                    )
+                )
+            )
+        );
+    }
+
+    public void testParseCheckerMethodSignatureStaticMethod() {
+        var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature(
+            "check$org_example_TestClass$staticMethod",
+            new Type[] { Type.getType(Class.class) }
+        );
+
+        assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of())));
+    }
+
+    public void testParseCheckerMethodSignatureStaticMethodWithArgs() {
+        var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature(
+            "check$org_example_TestClass$staticMethod",
+            new Type[] { Type.getType(Class.class), Type.getType("I"), Type.getType(String.class) }
+        );
+
+        assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of("I", "java/lang/String"))));
+    }
+
+    public void testParseCheckerMethodSignatureStaticMethodInnerClass() {
+        var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature(
+            "check$org_example_TestClass$InnerClass$staticMethod",
+            new Type[] { Type.getType(Class.class) }
+        );
+
+        assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass$InnerClass", "staticMethod", List.of())));
+    }
+
+    public void testParseCheckerMethodSignatureIncorrectName() {
+        var exception = assertThrows(
+            IllegalArgumentException.class,
+            () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$staticMethod", new Type[] {
Type.getType(Class.class) }) + ); + + assertThat(exception.getMessage(), containsString("has incorrect name format")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$ClassName$staticMethod", new Type[] {}) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentType() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$ClassName$staticMethod", + new Type[] { Type.getType(String.class) } + ) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of() + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(String.class) }) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(Class.class) }) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes2() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType("I") } + ) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index 9a57e199d4907..e3f5539999be5 100644 --- 
a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java
+++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java
@@ -11,7 +11,9 @@
 
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.entitlement.bridge.EntitlementChecker;
+import org.elasticsearch.entitlement.instrumentation.CheckerMethod;
 import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+import org.elasticsearch.entitlement.instrumentation.MethodKey;
 import org.elasticsearch.logging.LogManager;
 import org.elasticsearch.logging.Logger;
 import org.elasticsearch.test.ESTestCase;
@@ -22,11 +24,12 @@
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Arrays;
-import java.util.stream.Collectors;
+import java.util.Map;
 
 import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text;
 import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.startsWith;
 import static org.objectweb.asm.Opcodes.INVOKESTATIC;
 
 /**
@@ -53,7 +56,12 @@ public void initialize() {
      * Contains all the virtual methods from {@link ClassToInstrument},
      * allowing this test to call them on the dynamically loaded instrumented class.
      */
-    public interface Testable {}
+    public interface Testable {
+        // This method is here to demonstrate Instrumenter does not get confused by overloads
+        void someMethod(int arg);
+
+        void someMethod(int arg, String anotherArg);
+    }
 
     /**
     * This is a placeholder for real class library methods.
@@ -71,10 +79,26 @@ public static void systemExit(int status) {
         public static void anotherSystemExit(int status) {
             assertEquals(123, status);
         }
+
+        public void someMethod(int arg) {}
+
+        public void someMethod(int arg, String anotherArg) {}
+
+        public static void someStaticMethod(int arg) {}
+
+        public static void someStaticMethod(int arg, String anotherArg) {}
     }
 
     static final class TestException extends RuntimeException {}
 
+    public interface MockEntitlementChecker extends EntitlementChecker {
+        void checkSomeStaticMethod(Class<?> clazz, int arg);
+
+        void checkSomeStaticMethod(Class<?> clazz, int arg, String anotherArg);
+
+        void checkSomeInstanceMethod(Class<?> clazz, Testable that, int arg, String anotherArg);
+    }
+
     /**
      * We're not testing the permission checking logic here;
      * only that the instrumented methods are calling the correct check methods with the correct arguments.
@@ -82,7 +106,7 @@ static final class TestException extends RuntimeException {}
      * just to demonstrate that the injected bytecodes succeed in calling these methods.
     * It also asserts that the arguments are correct.
     */
-    public static class TestEntitlementChecker implements EntitlementChecker {
+    public static class TestEntitlementChecker implements MockEntitlementChecker {
         /**
         * This allows us to test that the instrumentation is correct in both cases:
         * if the check throws, and if it doesn't.
@@ -90,9 +114,12 @@ public static class TestEntitlementChecker implements EntitlementChecker {
@@ -90,9 +114,12 @@ public static class TestEntitlementChecker implements EntitlementChecker { volatile boolean isActive; int checkSystemExitCallCount = 0; + int checkSomeStaticMethodIntCallCount = 0; + int checkSomeStaticMethodIntStringCallCount = 0; + int checkSomeInstanceMethodCallCount = 0; @Override - public void checkSystemExit(Class callerClass, int status) { + public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; assertSame(InstrumenterTests.class, callerClass); assertEquals(123, status); @@ -104,11 +131,48 @@ private void throwIfActive() { throw new TestException(); } } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg) { + checkSomeStaticMethodIntCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { + checkSomeStaticMethodIntStringCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + assertEquals("abc", anotherArg); + throwIfActive(); + } + + @Override + public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { + checkSomeInstanceMethodCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertThat( + that.getClass().getName(), + startsWith("org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$ClassToInstrument") + ); + assertEquals(123, arg); + assertEquals("def", anotherArg); + throwIfActive(); + } } public void testClassIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -117,7 +181,7 @@ public void testClassIsInstrumented() throws Exception { } Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW", + classToInstrument.getName() + "_NEW", newBytecode ); @@ -134,7 +198,14 @@ public void testClassIsInstrumented() throws Exception { public void testClassIsNotInstrumentedTwice() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -146,7 +217,7 @@ public void testClassIsNotInstrumentedTwice() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + 
"_NEW_NEW", instrumentedTwiceBytecode ); @@ -159,7 +230,16 @@ public void testClassIsNotInstrumentedTwice() throws Exception { public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit", "anotherSystemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod, + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("anotherSystemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -171,7 +251,7 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + "_NEW_NEW", instrumentedTwiceBytecode ); @@ -185,22 +265,78 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); } - /** This test doesn't replace ClassToInstrument in-place but instead loads a separate - * class ClassToInstrument_NEW that contains the instrumentation. Because of this, - * we need to configure the Transformer to use a MethodKey and instrumentationMethod - * with slightly different signatures (using the common interface Testable) which - * is not what would happen when it's run by the agent. - */ - private InstrumenterImpl createInstrumenter(Class classToInstrument, String... 
methodNames) throws NoSuchMethodException { - Method v1 = EntitlementChecker.class.getMethod("checkSystemExit", Class.class, int.class); - var methods = Arrays.stream(methodNames).map(name -> { - try { - return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); + public void testInstrumenterWorksWithOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); + + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount, is(1)); + } + + public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + Testable testTargetClass = (Testable) (newClass.getConstructor().newInstance()); + + // This overload is not instrumented, so it will not throw + testTargetClass.someMethod(123); + assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); + + assertThat(getTestEntitlementChecker().checkSomeInstanceMethodCallCount, is(1)); + } + + /** This test doesn't replace classToInstrument in-place but instead loads a separate + * class with the same class name plus a "_NEW" suffix (classToInstrument.class.getName() + "_NEW") + * that contains the instrumentation. 
Because of this, we need to configure the Transformer to use a + * MethodKey and instrumentationMethod with slightly different signatures (using the common interface + * Testable) which is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Map methods) throws NoSuchMethodException { Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); return new InstrumenterImpl("_NEW", methods) { /** @@ -220,13 +356,38 @@ protected void pushEntitlementChecker(MethodVisitor mv) { }; } + private static CheckerMethod getCheckerMethod(Class clazz, String methodName, Class... parameterTypes) + throws NoSuchMethodException { + var method = clazz.getMethod(methodName, parameterTypes); + return new CheckerMethod( + Type.getInternalName(clazz), + method.getName(), + Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() + ); + } + /** * Calling a static method of a dynamically loaded class is significantly more cumbersome * than calling a virtual method. */ - private static void callStaticMethod(Class c, String methodName, int status) throws NoSuchMethodException, IllegalAccessException { + private static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod(methodName, int.class).invoke(null, arg); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! + throw n; + } else { + throw new AssertionError(cause); + } + } + } + + private static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, + IllegalAccessException { try { - c.getMethod(methodName, int.class).invoke(null, status); + c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause instanceof TestException n) { diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 5ebb7d00e26f5..167c93c90df5c 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,5 +10,5 @@ package org.elasticsearch.entitlement.bridge; public interface EntitlementChecker { - void checkSystemExit(Class callerClass, int status); + void check$java_lang_System$exit(Class callerClass, int status); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 155d5a27c606b..30c6045d1ccef 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -11,15 +11,16 @@ import org.elasticsearch.core.internal.provider.ProviderLocator; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; 
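+
+    // Editorial note, not part of the original change: the checker method name encodes the
+    // instrumented target. The segment between the two '$' separators names the target class
+    // with '_' in place of '.', and the final segment names the target method, so
+    // check$java_lang_System$exit maps to java.lang.System#exit(int). The leading Class<?>
+    // parameter receives the class that made the call.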
 }
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
index 155d5a27c606b..30c6045d1ccef 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
@@ -11,15 +11,16 @@
 
 import org.elasticsearch.core.internal.provider.ProviderLocator;
 import org.elasticsearch.entitlement.bridge.EntitlementChecker;
+import org.elasticsearch.entitlement.instrumentation.CheckerMethod;
 import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
 import org.elasticsearch.entitlement.instrumentation.MethodKey;
 import org.elasticsearch.entitlement.instrumentation.Transformer;
 import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker;
 
 import java.lang.instrument.Instrumentation;
-import java.lang.reflect.Method;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 /**
  * Called by the agent during {@code agentmain} to configure the entitlement system,
@@ -40,18 +41,24 @@ public static EntitlementChecker checker() {
     public static void initialize(Instrumentation inst) throws Exception {
         manager = new ElasticsearchEntitlementChecker();
 
-        // TODO: Configure actual entitlement grants instead of this hardcoded one
-        Method targetMethod = System.class.getMethod("exit", int.class);
-        Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.bridge.EntitlementChecker")
-            .getMethod("checkSystemExit", Class.class, int.class);
-        Map<MethodKey, Method> methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod);
+        Map<MethodKey, CheckerMethod> methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument(
+            "org.elasticsearch.entitlement.bridge.EntitlementChecker"
+        );
 
-        inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true);
-        inst.retransformClasses(System.class);
+        var classesToTransform = methodMap.keySet().stream().map(MethodKey::className).collect(Collectors.toSet());
+
+        inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), classesToTransform), true);
+        // TODO: should we limit this array somehow?
+        var classesToRetransform = classesToTransform.stream().map(EntitlementInitialization::internalNameToClass).toArray(Class<?>[]::new);
+        inst.retransformClasses(classesToRetransform);
     }
 
-    private static String internalName(Class<?> c) {
-        return c.getName().replace('.', '/');
+    private static Class<?> internalNameToClass(String internalName) {
+        try {
+            return Class.forName(internalName.replace('/', '.'), false, ClassLoader.getPlatformClassLoader());
+        } catch (ClassNotFoundException e) {
+            throw new RuntimeException(e);
+        }
     }
 
     private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>(
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java
new file mode 100644
index 0000000000000..c20a75a61a608
--- /dev/null
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation;
+
+import java.util.List;
+
+/**
+ * A structure to use as a representation of the checker method the instrumentation will inject.
+ *
+ * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes
+ * @param methodName the checker method name
+ * @param parameterDescriptors a list of type descriptors for the methodName parameters.
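+ *                             For example (illustrative values only, added editorially), the
+ *                             {@code System.exit} checker declared in this change could be represented as
+ *                             {@code new CheckerMethod("org/elasticsearch/entitlement/bridge/EntitlementChecker",
+ *                             "check$java_lang_System$exit", List.of("Ljava/lang/Class;", "I"))},
+ *                             where {@code "Ljava/lang/Class;"} and {@code "I"} are the descriptors
+ *                             for {@code Class} and {@code int}.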
+ */
+public record CheckerMethod(String className, String methodName, List<String> parameterDescriptors) {}
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
index 25fa84ec7c4ba..12316bfb043c5 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
@@ -9,6 +9,7 @@
 
 package org.elasticsearch.entitlement.instrumentation;
 
+import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.Map;
 
@@ -16,10 +17,12 @@
  * The SPI service entry point for instrumentation.
  */
 public interface InstrumentationService {
-    Instrumenter newInstrumenter(String classNameSuffix, Map<MethodKey, Method> instrumentationMethods);
+    Instrumenter newInstrumenter(String classNameSuffix, Map<MethodKey, CheckerMethod> instrumentationMethods);
 
     /**
      * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline
     */
     MethodKey methodKeyForTarget(Method targetMethod);
+
+    Map<MethodKey, CheckerMethod> lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException;
 }
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
index 54e09c10bcc57..256a4d709d9dc 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
@@ -12,7 +12,10 @@
 import java.util.List;
 
 /**
+ * A structure to use as a key/lookup for a method target of instrumentation
  *
- * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes
+ * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes
+ * @param methodName the method name
+ * @param parameterTypes a list of "internal names" for the parameter types
 */
-public record MethodKey(String className, String methodName, List<String> parameterTypes, boolean isStatic) {}
+public record MethodKey(String className, String methodName, List<String> parameterTypes) {}
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
index cff3117d032a4..6324dbf73ee05 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
@@ -44,7 +44,7 @@ private static Set<Module> findSystemModules() {
     }
 
     @Override
-    public void checkSystemExit(Class<?> callerClass, int status) {
+    public void check$java_lang_System$exit(Class<?> callerClass, int status) {
         var requestingModule = requestingModule(callerClass);
         if (isTriviallyAllowed(requestingModule)) {
             return;

From ea4b41fca874b669245c838b3f587eeaa7c9b018 Mon Sep 17 00:00:00 2001
From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com>
Date: Thu, 21 Nov 2024 07:45:22 +0100
Subject: [PATCH 123/386] ESQL - match operator included in non-snapshot builds (#116819)
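
This change enables the match operator (`:`) outside snapshot builds. As a
minimal illustration of the syntax (an editorial sketch; the tested examples
live in the docs files added below):

    FROM books
    | WHERE author : "Faulkner"
    | KEEP book_no, author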
---
 docs/changelog/116819.yaml | 5 +
 .../kibana/definition/match_operator.json | 49 +
 .../functions/kibana/docs/match_operator.md | 14 +
 .../esql/functions/operators.asciidoc | 2 +
 docs/reference/esql/functions/search.asciidoc | 23 +
 .../functions/signature/match_operator.svg | 1 +
 .../functions/types/match_operator.asciidoc | 10 +
 .../xpack/esql/plugin/MatchOperatorIT.java | 9 -
 .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 +-
 .../esql/src/main/antlr/EsqlBaseLexer.tokens | 34 +-
 .../esql/src/main/antlr/EsqlBaseParser.g4 | 2 +-
 .../esql/src/main/antlr/EsqlBaseParser.tokens | 34 +-
 .../xpack/esql/action/EsqlCapabilities.java | 2 +-
 .../xpack/esql/parser/EsqlBaseLexer.interp | 9 +-
 .../xpack/esql/parser/EsqlBaseLexer.java | 2071 ++++++++---------
 .../xpack/esql/parser/EsqlBaseParser.interp | 6 +-
 .../xpack/esql/parser/EsqlBaseParser.java | 1400 ++++++-----
 .../xpack/esql/analysis/AnalyzerTests.java | 2 -
 .../xpack/esql/analysis/VerifierTests.java | 11 -
 .../function/AbstractFunctionTestCase.java | 21 +-
 .../expression/function/RailRoadDiagram.java | 12 +
 .../function/fulltext/MatchOperatorTests.java | 41 +
 .../function/fulltext/MatchTests.java | 31 +-
 .../LocalPhysicalPlanOptimizerTests.java | 5 -
 .../esql/parser/StatementParserTests.java | 2 -
 25 files changed, 1963 insertions(+), 1837 deletions(-)
 create mode 100644 docs/changelog/116819.yaml
 create mode 100644 docs/reference/esql/functions/kibana/definition/match_operator.json
 create mode 100644 docs/reference/esql/functions/kibana/docs/match_operator.md
 create mode 100644 docs/reference/esql/functions/search.asciidoc
 create mode 100644 docs/reference/esql/functions/signature/match_operator.svg
 create mode 100644 docs/reference/esql/functions/types/match_operator.asciidoc
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java

diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml
new file mode 100644
index 0000000000000..afe06c583fe55
--- /dev/null
+++ b/docs/changelog/116819.yaml
@@ -0,0 +1,5 @@
+pr: 116819
+summary: ESQL - Add match operator (:)
+area: Search
+type: feature
+issues: []
diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json
new file mode 100644
index 0000000000000..2facebfc44e57
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/definition/match_operator.json
@@ -0,0 +1,49 @@
+{
+  "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.",
+  "type" : "operator",
+  "name" : "match_operator",
+  "description" : "Performs a match query on the specified field. Returns true if the provided query matches the row.",
+  "signatures" : [
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "keyword",
+          "optional" : false,
+          "description" : "Field that the query will target."
+        },
+        {
+          "name" : "query",
+          "type" : "keyword",
+          "optional" : false,
+          "description" : "Text you wish to find in the provided field."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    },
+    {
+      "params" : [
+        {
+          "name" : "field",
+          "type" : "text",
+          "optional" : false,
+          "description" : "Field that the query will target."
+        },
+        {
+          "name" : "query",
+          "type" : "text",
+          "optional" : false,
+          "description" : "Text you wish to find in the provided field."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    }
+  ],
+  "examples" : [
+    "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+  ],
+  "preview" : true,
+  "snapshot_only" : false
+}
diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md
new file mode 100644
index 0000000000000..fda8b24ff76cc
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/docs/match_operator.md
@@ -0,0 +1,14 @@
+<!--
+This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+-->
+
+### MATCH_OPERATOR
+Performs a match query on the specified field. Returns true if the provided query matches the row.
+
+```
+FROM books
+| WHERE MATCH(author, "Faulkner")
+| KEEP book_no, author
+| SORT book_no
+| LIMIT 5;
+```
diff --git a/docs/reference/esql/functions/operators.asciidoc b/docs/reference/esql/functions/operators.asciidoc
index ee344a52687c2..a1a2226335e9b 100644
--- a/docs/reference/esql/functions/operators.asciidoc
+++ b/docs/reference/esql/functions/operators.asciidoc
@@ -16,6 +16,7 @@ Boolean operators for comparing against one or multiple expressions.
 * <<esql-in-operator>>
 * <<esql-like-operator>>
 * <<esql-rlike-operator>>
+* experimental:[] <<esql-search-operators>>
 // end::op_list[]
 
 include::binary.asciidoc[]
@@ -26,3 +27,4 @@ include::cast.asciidoc[]
 include::in.asciidoc[]
 include::like.asciidoc[]
 include::rlike.asciidoc[]
+include::search.asciidoc[]
diff --git a/docs/reference/esql/functions/search.asciidoc b/docs/reference/esql/functions/search.asciidoc
new file mode 100644
index 0000000000000..ae1b003b65abb
--- /dev/null
+++ b/docs/reference/esql/functions/search.asciidoc
@@ -0,0 +1,23 @@
+[discrete]
+[[esql-search-operators]]
+=== Search operators
+
+The only search operator is match (`:`).
+
+preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."]
+
+The match operator performs a <<query-dsl-match-query,match query>> on the specified field. Returns true if the provided query matches the row.
+
+[.text-center]
+image::esql/functions/signature/match_operator.svg[Embedded,opts=inline]
+
+include::types/match.asciidoc[]
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/match-operator.csv-spec[tag=match-with-field]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/match-operator.csv-spec[tag=match-with-field-result]
+|===
diff --git a/docs/reference/esql/functions/signature/match_operator.svg b/docs/reference/esql/functions/signature/match_operator.svg
new file mode 100644
index 0000000000000..70cea841622eb
--- /dev/null
+++ b/docs/reference/esql/functions/signature/match_operator.svg
@@ -0,0 +1 @@
+field:query
\ No newline at end of file
diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc
new file mode 100644
index 0000000000000..5c6afacdce1b2
--- /dev/null
+++ b/docs/reference/esql/functions/types/match_operator.asciidoc
@@ -0,0 +1,10 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
+
+*Supported types*
+
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+field | query | result
+keyword | keyword | boolean
+text | text | boolean
+|===
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java
index b86c46fd3fa7a..3b647583f1129 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java
@@ -14,9 +14,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
-import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
-import org.elasticsearch.xpack.esql.action.EsqlQueryRequest;
-import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
 import org.junit.Before;
 
 import java.util.List;
@@ -32,12 +29,6 @@ public void setupIndex() {
         createAndPopulateIndex();
     }
 
-    @Override
-    protected EsqlQueryResponse run(EsqlQueryRequest request) {
-        assumeTrue("match operator capability not available", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-        return super.run(request);
-    }
-
     public void testSimpleWhereMatch() {
         var query = """
             FROM test
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
index b2f0e2942d3cc..ef875d7ca01d8 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
@@ -112,8 +112,6 @@ WS
     : [ \r\n\t]+ -> channel(HIDDEN)
     ;
 
-COLON : ':';
-
 //
 // Expression - used by most command
 //
@@ -184,6 +182,7 @@ AND : 'and';
 ASC : 'asc';
 ASSIGN : '=';
 CAST_OP : '::';
+COLON : ':';
 COMMA : ',';
 DESC : 'desc';
 DOT : '.';
@@ -216,7 +215,6 @@ MINUS : '-';
 ASTERISK : '*';
 SLASH : '/';
 PERCENT : '%';
-EXPRESSION_COLON : {this.isDevVersion()}? COLON -> type(COLON);
 
 NESTED_WHERE : WHERE -> type(WHERE);
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens
index 16376d6863b2f..b1a16987dd8ce 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens
@@ -26,16 +26,16 @@ UNKNOWN_CMD=25
 LINE_COMMENT=26
 MULTILINE_COMMENT=27
 WS=28
-COLON=29
-PIPE=30
-QUOTED_STRING=31
-INTEGER_LITERAL=32
-DECIMAL_LITERAL=33
-BY=34
-AND=35
-ASC=36
-ASSIGN=37
-CAST_OP=38
+PIPE=29
+QUOTED_STRING=30
+INTEGER_LITERAL=31
+DECIMAL_LITERAL=32
+BY=33
+AND=34
+ASC=35
+ASSIGN=36
+CAST_OP=37
+COLON=38
 COMMA=39
 DESC=40
 DOT=41
@@ -142,13 +142,13 @@ CLOSING_METRICS_WS=128
 'sort'=14
 'stats'=15
 'where'=16
-':'=29
-'|'=30
-'by'=34
-'and'=35
-'asc'=36
-'='=37
-'::'=38
+'|'=29
+'by'=33
+'and'=34
+'asc'=35
+'='=36
+'::'=37
+':'=38
 ','=39
 'desc'=40
 '.'=41
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
index 33c4d8957d387..f84cfe3060503 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
@@ -69,7 +69,7 @@ booleanExpression
     | left=booleanExpression operator=OR right=booleanExpression #logicalBinary
     | valueExpression (NOT)? IN LP valueExpression (COMMA valueExpression)* RP #logicalIn
     | valueExpression IS NOT? NULL #isNull
-    | {this.isDevVersion()}? matchBooleanExpression #matchExpression
+    | matchBooleanExpression #matchExpression
     ;
 
 regexBooleanExpression
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens
index 16376d6863b2f..b1a16987dd8ce 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens
@@ -26,16 +26,16 @@ UNKNOWN_CMD=25
 LINE_COMMENT=26
 MULTILINE_COMMENT=27
 WS=28
-COLON=29
-PIPE=30
-QUOTED_STRING=31
-INTEGER_LITERAL=32
-DECIMAL_LITERAL=33
-BY=34
-AND=35
-ASC=36
-ASSIGN=37
-CAST_OP=38
+PIPE=29
+QUOTED_STRING=30
+INTEGER_LITERAL=31
+DECIMAL_LITERAL=32
+BY=33
+AND=34
+ASC=35
+ASSIGN=36
+CAST_OP=37
+COLON=38
 COMMA=39
 DESC=40
 DOT=41
@@ -142,13 +142,13 @@ CLOSING_METRICS_WS=128
 'sort'=14
 'stats'=15
 'where'=16
-':'=29
-'|'=30
-'by'=34
-'and'=35
-'asc'=36
-'='=37
-'::'=38
+'|'=29
+'by'=33
+'and'=34
+'asc'=35
+'='=36
+'::'=37
+':'=38
 ','=39
 'desc'=40
 '.'=41
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index 4137d863e0f7e..c5d3ee29d0bda 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -307,7 +307,7 @@ public enum Cap {
     /**
      * Support for match operator as a colon. Previous support for match operator as MATCH has been removed
     */
-    MATCH_OPERATOR_COLON(Build.current().isSnapshot()),
+    MATCH_OPERATOR_COLON,
 
     /**
      * Removing support for the {@code META} keyword.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
index 8b8bab2edbc41..c83fdbe8847a9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
@@ -28,7 +28,6 @@ null
 null
 null
 null
-':'
 '|'
 null
 null
@@ -38,6 +37,7 @@ null
 'asc'
 '='
 '::'
+':'
 ','
 'desc'
 '.'
@@ -159,7 +159,6 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -169,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -289,7 +289,6 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE DIGIT LETTER @@ -309,6 +308,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -339,7 +339,6 @@ MINUS ASTERISK SLASH PERCENT -EXPRESSION_COLON NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -499,4 +498,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 128, 1608, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 
195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 656, 8, 24, 11, 24, 12, 24, 657, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 666, 8, 25, 10, 25, 12, 25, 669, 9, 25, 1, 25, 3, 25, 672, 8, 25, 1, 25, 3, 25, 675, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 684, 8, 26, 10, 26, 12, 26, 687, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 695, 8, 27, 11, 27, 12, 27, 696, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 3, 34, 718, 8, 34, 1, 34, 4, 34, 721, 8, 34, 11, 34, 12, 34, 722, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 732, 8, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 739, 8, 39, 1, 40, 1, 40, 1, 40, 5, 40, 744, 8, 40, 10, 40, 12, 40, 747, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 755, 8, 40, 10, 40, 12, 40, 758, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 765, 8, 40, 1, 40, 3, 40, 768, 8, 40, 3, 40, 770, 8, 40, 1, 41, 4, 41, 773, 8, 41, 11, 41, 12, 41, 774, 1, 42, 4, 42, 778, 8, 42, 11, 42, 12, 42, 779, 1, 42, 1, 42, 5, 42, 784, 8, 42, 10, 42, 12, 42, 787, 9, 42, 1, 42, 1, 42, 4, 42, 791, 8, 42, 11, 42, 12, 42, 792, 1, 42, 4, 42, 796, 8, 42, 11, 42, 12, 42, 797, 1, 42, 1, 42, 5, 42, 802, 8, 42, 10, 42, 12, 42, 805, 9, 42, 3, 42, 807, 8, 42, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 813, 8, 42, 11, 42, 12, 42, 814, 1, 42, 1, 42, 3, 42, 819, 8, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 
61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 3, 80, 950, 8, 80, 1, 80, 5, 80, 953, 8, 80, 10, 80, 12, 80, 956, 9, 80, 1, 80, 1, 80, 4, 80, 960, 8, 80, 11, 80, 12, 80, 961, 3, 80, 964, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 978, 8, 83, 10, 83, 12, 83, 981, 9, 83, 1, 83, 1, 83, 3, 83, 985, 8, 83, 1, 83, 4, 83, 988, 8, 83, 11, 83, 12, 83, 989, 3, 83, 992, 8, 83, 1, 84, 1, 84, 4, 84, 996, 8, 84, 11, 84, 12, 84, 997, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 3, 101, 1075, 8, 101, 1, 102, 4, 102, 1078, 8, 102, 11, 102, 12, 102, 1079, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 3, 113, 1129, 8, 113, 1, 114, 1, 114, 3, 114, 1133, 8, 114, 1, 114, 5, 114, 1136, 8, 114, 10, 114, 12, 114, 1139, 9, 114, 1, 114, 1, 114, 3, 114, 1143, 8, 114, 1, 114, 4, 114, 1146, 8, 114, 11, 114, 12, 114, 1147, 3, 114, 1150, 8, 114, 1, 115, 1, 115, 4, 115, 1154, 8, 115, 11, 115, 12, 115, 1155, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 135, 4, 135, 1241, 8, 135, 11, 135, 12, 135, 1242, 1, 135, 1, 135, 3, 135, 1247, 8, 135, 1, 135, 4, 135, 1250, 8, 135, 11, 135, 12, 135, 1251, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 
1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 4, 168, 1397, 8, 168, 11, 168, 12, 168, 1398, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 2, 685, 756, 0, 214, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 30, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 0, 176, 69, 178, 70, 180, 71, 182, 72, 184, 0, 186, 73, 188, 74, 190, 75, 192, 76, 194, 0, 196, 0, 198, 77, 200, 78, 202, 79, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 80, 218, 0, 220, 81, 222, 0, 224, 0, 226, 82, 228, 83, 230, 84, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 85, 248, 86, 250, 87, 252, 88, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 89, 268, 0, 270, 90, 272, 91, 274, 92, 276, 0, 278, 0, 280, 93, 282, 94, 284, 0, 286, 95, 288, 0, 290, 96, 292, 97, 294, 98, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 99, 316, 100, 318, 101, 320, 0, 322, 0, 324, 0, 326, 0, 
328, 0, 330, 0, 332, 102, 334, 103, 336, 104, 338, 0, 340, 105, 342, 106, 344, 107, 346, 108, 348, 0, 350, 0, 352, 109, 354, 110, 356, 111, 358, 112, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 113, 376, 114, 378, 115, 380, 0, 382, 0, 384, 0, 386, 0, 388, 116, 390, 117, 392, 118, 394, 0, 396, 0, 398, 0, 400, 0, 402, 119, 404, 0, 406, 0, 408, 120, 410, 121, 412, 122, 414, 0, 416, 0, 418, 0, 420, 123, 422, 124, 424, 125, 426, 0, 428, 0, 430, 126, 432, 127, 434, 128, 436, 0, 438, 0, 440, 0, 442, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1635, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 0, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 4, 
232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 16, 444, 1, 0, 0, 0, 18, 454, 1, 0, 0, 0, 20, 461, 1, 0, 0, 0, 22, 470, 1, 0, 0, 0, 24, 477, 1, 0, 0, 0, 26, 487, 1, 0, 0, 0, 28, 494, 1, 0, 0, 0, 30, 501, 1, 0, 0, 0, 32, 508, 1, 0, 0, 0, 34, 516, 1, 0, 0, 0, 36, 528, 1, 0, 0, 0, 38, 537, 1, 0, 0, 0, 40, 543, 1, 0, 0, 0, 42, 550, 1, 0, 0, 0, 44, 557, 1, 0, 0, 0, 46, 565, 1, 0, 0, 0, 48, 573, 1, 0, 0, 0, 50, 588, 1, 0, 0, 0, 52, 600, 1, 0, 0, 0, 54, 611, 1, 0, 0, 0, 56, 619, 1, 0, 0, 0, 58, 627, 1, 0, 0, 0, 60, 635, 1, 0, 0, 0, 62, 644, 1, 0, 0, 0, 64, 655, 1, 0, 0, 0, 66, 661, 1, 0, 0, 0, 68, 678, 1, 0, 0, 0, 70, 694, 1, 0, 0, 0, 72, 700, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 706, 1, 0, 0, 0, 78, 708, 1, 0, 0, 0, 80, 710, 1, 0, 0, 0, 82, 713, 1, 0, 0, 0, 84, 715, 1, 0, 0, 0, 86, 724, 1, 0, 0, 0, 88, 726, 1, 0, 0, 0, 90, 731, 1, 0, 0, 0, 92, 733, 1, 0, 0, 0, 94, 738, 1, 0, 0, 0, 96, 769, 1, 0, 0, 0, 98, 772, 1, 0, 0, 0, 100, 818, 1, 0, 0, 0, 102, 820, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 831, 1, 0, 0, 0, 110, 833, 1, 0, 0, 0, 112, 836, 1, 0, 0, 0, 114, 838, 1, 0, 0, 0, 116, 843, 1, 0, 0, 0, 118, 845, 1, 0, 0, 0, 120, 851, 1, 0, 0, 0, 122, 857, 1, 0, 0, 0, 124, 860, 1, 0, 0, 0, 126, 863, 1, 0, 0, 0, 128, 868, 1, 0, 0, 0, 130, 873, 1, 0, 0, 0, 132, 875, 1, 0, 0, 0, 134, 879, 1, 0, 0, 0, 136, 884, 1, 0, 0, 0, 138, 890, 1, 0, 0, 0, 140, 893, 1, 0, 0, 0, 142, 895, 1, 0, 0, 0, 144, 901, 1, 0, 0, 0, 146, 903, 1, 0, 0, 0, 148, 908, 1, 0, 0, 0, 150, 911, 1, 0, 
0, 0, 152, 914, 1, 0, 0, 0, 154, 917, 1, 0, 0, 0, 156, 919, 1, 0, 0, 0, 158, 922, 1, 0, 0, 0, 160, 924, 1, 0, 0, 0, 162, 927, 1, 0, 0, 0, 164, 929, 1, 0, 0, 0, 166, 931, 1, 0, 0, 0, 168, 933, 1, 0, 0, 0, 170, 935, 1, 0, 0, 0, 172, 937, 1, 0, 0, 0, 174, 942, 1, 0, 0, 0, 176, 963, 1, 0, 0, 0, 178, 965, 1, 0, 0, 0, 180, 970, 1, 0, 0, 0, 182, 991, 1, 0, 0, 0, 184, 993, 1, 0, 0, 0, 186, 1001, 1, 0, 0, 0, 188, 1003, 1, 0, 0, 0, 190, 1007, 1, 0, 0, 0, 192, 1011, 1, 0, 0, 0, 194, 1015, 1, 0, 0, 0, 196, 1020, 1, 0, 0, 0, 198, 1025, 1, 0, 0, 0, 200, 1029, 1, 0, 0, 0, 202, 1033, 1, 0, 0, 0, 204, 1037, 1, 0, 0, 0, 206, 1042, 1, 0, 0, 0, 208, 1046, 1, 0, 0, 0, 210, 1050, 1, 0, 0, 0, 212, 1054, 1, 0, 0, 0, 214, 1058, 1, 0, 0, 0, 216, 1062, 1, 0, 0, 0, 218, 1074, 1, 0, 0, 0, 220, 1077, 1, 0, 0, 0, 222, 1081, 1, 0, 0, 0, 224, 1085, 1, 0, 0, 0, 226, 1089, 1, 0, 0, 0, 228, 1093, 1, 0, 0, 0, 230, 1097, 1, 0, 0, 0, 232, 1101, 1, 0, 0, 0, 234, 1106, 1, 0, 0, 0, 236, 1110, 1, 0, 0, 0, 238, 1114, 1, 0, 0, 0, 240, 1119, 1, 0, 0, 0, 242, 1128, 1, 0, 0, 0, 244, 1149, 1, 0, 0, 0, 246, 1153, 1, 0, 0, 0, 248, 1157, 1, 0, 0, 0, 250, 1161, 1, 0, 0, 0, 252, 1165, 1, 0, 0, 0, 254, 1169, 1, 0, 0, 0, 256, 1174, 1, 0, 0, 0, 258, 1178, 1, 0, 0, 0, 260, 1182, 1, 0, 0, 0, 262, 1186, 1, 0, 0, 0, 264, 1191, 1, 0, 0, 0, 266, 1196, 1, 0, 0, 0, 268, 1199, 1, 0, 0, 0, 270, 1203, 1, 0, 0, 0, 272, 1207, 1, 0, 0, 0, 274, 1211, 1, 0, 0, 0, 276, 1215, 1, 0, 0, 0, 278, 1220, 1, 0, 0, 0, 280, 1225, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1257, 1, 0, 0, 0, 292, 1261, 1, 0, 0, 0, 294, 1265, 1, 0, 0, 0, 296, 1269, 1, 0, 0, 0, 298, 1275, 1, 0, 0, 0, 300, 1279, 1, 0, 0, 0, 302, 1283, 1, 0, 0, 0, 304, 1287, 1, 0, 0, 0, 306, 1291, 1, 0, 0, 0, 308, 1295, 1, 0, 0, 0, 310, 1299, 1, 0, 0, 0, 312, 1304, 1, 0, 0, 0, 314, 1309, 1, 0, 0, 0, 316, 1313, 1, 0, 0, 0, 318, 1317, 1, 0, 0, 0, 320, 1321, 1, 0, 0, 0, 322, 1326, 1, 0, 0, 0, 324, 1330, 1, 0, 0, 0, 326, 1335, 1, 0, 0, 0, 328, 1340, 1, 0, 0, 0, 330, 1344, 1, 0, 0, 0, 332, 1348, 1, 0, 0, 0, 334, 1352, 1, 0, 0, 0, 336, 1356, 1, 0, 0, 0, 338, 1360, 1, 0, 0, 0, 340, 1365, 1, 0, 0, 0, 342, 1370, 1, 0, 0, 0, 344, 1374, 1, 0, 0, 0, 346, 1378, 1, 0, 0, 0, 348, 1382, 1, 0, 0, 0, 350, 1387, 1, 0, 0, 0, 352, 1396, 1, 0, 0, 0, 354, 1400, 1, 0, 0, 0, 356, 1404, 1, 0, 0, 0, 358, 1408, 1, 0, 0, 0, 360, 1412, 1, 0, 0, 0, 362, 1417, 1, 0, 0, 0, 364, 1421, 1, 0, 0, 0, 366, 1425, 1, 0, 0, 0, 368, 1429, 1, 0, 0, 0, 370, 1434, 1, 0, 0, 0, 372, 1438, 1, 0, 0, 0, 374, 1442, 1, 0, 0, 0, 376, 1446, 1, 0, 0, 0, 378, 1450, 1, 0, 0, 0, 380, 1454, 1, 0, 0, 0, 382, 1460, 1, 0, 0, 0, 384, 1464, 1, 0, 0, 0, 386, 1468, 1, 0, 0, 0, 388, 1472, 1, 0, 0, 0, 390, 1476, 1, 0, 0, 0, 392, 1480, 1, 0, 0, 0, 394, 1484, 1, 0, 0, 0, 396, 1489, 1, 0, 0, 0, 398, 1493, 1, 0, 0, 0, 400, 1497, 1, 0, 0, 0, 402, 1503, 1, 0, 0, 0, 404, 1512, 1, 0, 0, 0, 406, 1516, 1, 0, 0, 0, 408, 1520, 1, 0, 0, 0, 410, 1524, 1, 0, 0, 0, 412, 1528, 1, 0, 0, 0, 414, 1532, 1, 0, 0, 0, 416, 1537, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1549, 1, 0, 0, 0, 422, 1553, 1, 0, 0, 0, 424, 1557, 1, 0, 0, 0, 426, 1561, 1, 0, 0, 0, 428, 1567, 1, 0, 0, 0, 430, 1573, 1, 0, 0, 0, 432, 1577, 1, 0, 0, 0, 434, 1581, 1, 0, 0, 0, 436, 1585, 1, 0, 0, 0, 438, 1591, 1, 0, 0, 0, 440, 1597, 1, 0, 0, 0, 442, 1603, 1, 0, 0, 0, 444, 445, 7, 0, 0, 0, 445, 446, 7, 1, 0, 0, 446, 447, 7, 2, 0, 0, 447, 448, 7, 2, 0, 0, 448, 449, 7, 3, 0, 0, 449, 450, 7, 4, 0, 0, 450, 451, 7, 5, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 0, 0, 0, 453, 17, 
1, 0, 0, 0, 454, 455, 7, 0, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 8, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 1, 1, 0, 460, 19, 1, 0, 0, 0, 461, 462, 7, 3, 0, 0, 462, 463, 7, 9, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 1, 0, 0, 465, 466, 7, 4, 0, 0, 466, 467, 7, 10, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 2, 2, 0, 469, 21, 1, 0, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 11, 0, 0, 472, 473, 7, 12, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 3, 0, 0, 476, 23, 1, 0, 0, 0, 477, 478, 7, 3, 0, 0, 478, 479, 7, 14, 0, 0, 479, 480, 7, 8, 0, 0, 480, 481, 7, 13, 0, 0, 481, 482, 7, 12, 0, 0, 482, 483, 7, 1, 0, 0, 483, 484, 7, 9, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 4, 3, 0, 486, 25, 1, 0, 0, 0, 487, 488, 7, 15, 0, 0, 488, 489, 7, 6, 0, 0, 489, 490, 7, 7, 0, 0, 490, 491, 7, 16, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 6, 5, 4, 0, 493, 27, 1, 0, 0, 0, 494, 495, 7, 17, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 7, 0, 0, 497, 498, 7, 18, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 6, 6, 0, 0, 500, 29, 1, 0, 0, 0, 501, 502, 7, 18, 0, 0, 502, 503, 7, 3, 0, 0, 503, 504, 7, 3, 0, 0, 504, 505, 7, 8, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 7, 1, 0, 507, 31, 1, 0, 0, 0, 508, 509, 7, 13, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 16, 0, 0, 511, 512, 7, 1, 0, 0, 512, 513, 7, 5, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 8, 0, 0, 515, 33, 1, 0, 0, 0, 516, 517, 7, 16, 0, 0, 517, 518, 7, 11, 0, 0, 518, 519, 5, 95, 0, 0, 519, 520, 7, 3, 0, 0, 520, 521, 7, 14, 0, 0, 521, 522, 7, 8, 0, 0, 522, 523, 7, 12, 0, 0, 523, 524, 7, 9, 0, 0, 524, 525, 7, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 9, 5, 0, 527, 35, 1, 0, 0, 0, 528, 529, 7, 6, 0, 0, 529, 530, 7, 3, 0, 0, 530, 531, 7, 9, 0, 0, 531, 532, 7, 12, 0, 0, 532, 533, 7, 16, 0, 0, 533, 534, 7, 3, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 10, 6, 0, 536, 37, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 539, 7, 7, 0, 0, 539, 540, 7, 19, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 6, 11, 0, 0, 542, 39, 1, 0, 0, 0, 543, 544, 7, 2, 0, 0, 544, 545, 7, 10, 0, 0, 545, 546, 7, 7, 0, 0, 546, 547, 7, 19, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 12, 7, 0, 549, 41, 1, 0, 0, 0, 550, 551, 7, 2, 0, 0, 551, 552, 7, 7, 0, 0, 552, 553, 7, 6, 0, 0, 553, 554, 7, 5, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 13, 0, 0, 556, 43, 1, 0, 0, 0, 557, 558, 7, 2, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 12, 0, 0, 560, 561, 7, 5, 0, 0, 561, 562, 7, 2, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 14, 0, 0, 564, 45, 1, 0, 0, 0, 565, 566, 7, 19, 0, 0, 566, 567, 7, 10, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 6, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 15, 0, 0, 572, 47, 1, 0, 0, 0, 573, 574, 4, 16, 0, 0, 574, 575, 7, 1, 0, 0, 575, 576, 7, 9, 0, 0, 576, 577, 7, 13, 0, 0, 577, 578, 7, 1, 0, 0, 578, 579, 7, 9, 0, 0, 579, 580, 7, 3, 0, 0, 580, 581, 7, 2, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 12, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 2, 0, 0, 585, 586, 1, 0, 0, 0, 586, 587, 6, 16, 0, 0, 587, 49, 1, 0, 0, 0, 588, 589, 4, 17, 1, 0, 589, 590, 7, 13, 0, 0, 590, 591, 7, 7, 0, 0, 591, 592, 7, 7, 0, 0, 592, 593, 7, 18, 0, 0, 593, 594, 7, 20, 0, 0, 594, 595, 7, 8, 0, 0, 595, 596, 5, 95, 0, 0, 596, 597, 5, 128020, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 17, 8, 0, 599, 51, 1, 0, 0, 0, 600, 601, 4, 18, 2, 0, 601, 602, 7, 16, 0, 0, 602, 603, 7, 3, 0, 0, 603, 604, 7, 5, 0, 0, 604, 605, 7, 6, 0, 0, 605, 606, 7, 1, 0, 0, 606, 607, 7, 4, 0, 0, 607, 608, 7, 2, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 6, 18, 9, 0, 610, 53, 1, 0, 0, 0, 611, 612, 4, 19, 3, 0, 612, 613, 7, 21, 0, 0, 613, 614, 
7, 7, 0, 0, 614, 615, 7, 1, 0, 0, 615, 616, 7, 9, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 19, 10, 0, 618, 55, 1, 0, 0, 0, 619, 620, 4, 20, 4, 0, 620, 621, 7, 15, 0, 0, 621, 622, 7, 20, 0, 0, 622, 623, 7, 13, 0, 0, 623, 624, 7, 13, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 20, 10, 0, 626, 57, 1, 0, 0, 0, 627, 628, 4, 21, 5, 0, 628, 629, 7, 13, 0, 0, 629, 630, 7, 3, 0, 0, 630, 631, 7, 15, 0, 0, 631, 632, 7, 5, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 21, 10, 0, 634, 59, 1, 0, 0, 0, 635, 636, 4, 22, 6, 0, 636, 637, 7, 6, 0, 0, 637, 638, 7, 1, 0, 0, 638, 639, 7, 17, 0, 0, 639, 640, 7, 10, 0, 0, 640, 641, 7, 5, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 6, 22, 10, 0, 643, 61, 1, 0, 0, 0, 644, 645, 4, 23, 7, 0, 645, 646, 7, 13, 0, 0, 646, 647, 7, 7, 0, 0, 647, 648, 7, 7, 0, 0, 648, 649, 7, 18, 0, 0, 649, 650, 7, 20, 0, 0, 650, 651, 7, 8, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, 6, 23, 10, 0, 653, 63, 1, 0, 0, 0, 654, 656, 8, 22, 0, 0, 655, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 655, 1, 0, 0, 0, 657, 658, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 660, 6, 24, 0, 0, 660, 65, 1, 0, 0, 0, 661, 662, 5, 47, 0, 0, 662, 663, 5, 47, 0, 0, 663, 667, 1, 0, 0, 0, 664, 666, 8, 23, 0, 0, 665, 664, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 667, 668, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 670, 672, 5, 13, 0, 0, 671, 670, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 674, 1, 0, 0, 0, 673, 675, 5, 10, 0, 0, 674, 673, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 677, 6, 25, 11, 0, 677, 67, 1, 0, 0, 0, 678, 679, 5, 47, 0, 0, 679, 680, 5, 42, 0, 0, 680, 685, 1, 0, 0, 0, 681, 684, 3, 68, 26, 0, 682, 684, 9, 0, 0, 0, 683, 681, 1, 0, 0, 0, 683, 682, 1, 0, 0, 0, 684, 687, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 688, 689, 5, 42, 0, 0, 689, 690, 5, 47, 0, 0, 690, 691, 1, 0, 0, 0, 691, 692, 6, 26, 11, 0, 692, 69, 1, 0, 0, 0, 693, 695, 7, 24, 0, 0, 694, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 6, 27, 11, 0, 699, 71, 1, 0, 0, 0, 700, 701, 5, 58, 0, 0, 701, 73, 1, 0, 0, 0, 702, 703, 5, 124, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 6, 29, 12, 0, 705, 75, 1, 0, 0, 0, 706, 707, 7, 25, 0, 0, 707, 77, 1, 0, 0, 0, 708, 709, 7, 26, 0, 0, 709, 79, 1, 0, 0, 0, 710, 711, 5, 92, 0, 0, 711, 712, 7, 27, 0, 0, 712, 81, 1, 0, 0, 0, 713, 714, 8, 28, 0, 0, 714, 83, 1, 0, 0, 0, 715, 717, 7, 3, 0, 0, 716, 718, 7, 29, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 720, 1, 0, 0, 0, 719, 721, 3, 76, 30, 0, 720, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 85, 1, 0, 0, 0, 724, 725, 5, 64, 0, 0, 725, 87, 1, 0, 0, 0, 726, 727, 5, 96, 0, 0, 727, 89, 1, 0, 0, 0, 728, 732, 8, 30, 0, 0, 729, 730, 5, 96, 0, 0, 730, 732, 5, 96, 0, 0, 731, 728, 1, 0, 0, 0, 731, 729, 1, 0, 0, 0, 732, 91, 1, 0, 0, 0, 733, 734, 5, 95, 0, 0, 734, 93, 1, 0, 0, 0, 735, 739, 3, 78, 31, 0, 736, 739, 3, 76, 30, 0, 737, 739, 3, 92, 38, 0, 738, 735, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 737, 1, 0, 0, 0, 739, 95, 1, 0, 0, 0, 740, 745, 5, 34, 0, 0, 741, 744, 3, 80, 32, 0, 742, 744, 3, 82, 33, 0, 743, 741, 1, 0, 0, 0, 743, 742, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 748, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 748, 770, 5, 34, 0, 0, 749, 750, 5, 34, 0, 0, 750, 751, 5, 34, 0, 0, 751, 752, 5, 34, 0, 0, 752, 756, 1, 0, 0, 0, 753, 755, 8, 23, 0, 0, 754, 753, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 
759, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 759, 760, 5, 34, 0, 0, 760, 761, 5, 34, 0, 0, 761, 762, 5, 34, 0, 0, 762, 764, 1, 0, 0, 0, 763, 765, 5, 34, 0, 0, 764, 763, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 767, 1, 0, 0, 0, 766, 768, 5, 34, 0, 0, 767, 766, 1, 0, 0, 0, 767, 768, 1, 0, 0, 0, 768, 770, 1, 0, 0, 0, 769, 740, 1, 0, 0, 0, 769, 749, 1, 0, 0, 0, 770, 97, 1, 0, 0, 0, 771, 773, 3, 76, 30, 0, 772, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 99, 1, 0, 0, 0, 776, 778, 3, 76, 30, 0, 777, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 785, 3, 116, 50, 0, 782, 784, 3, 76, 30, 0, 783, 782, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 819, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 790, 3, 116, 50, 0, 789, 791, 3, 76, 30, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 819, 1, 0, 0, 0, 794, 796, 3, 76, 30, 0, 795, 794, 1, 0, 0, 0, 796, 797, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 806, 1, 0, 0, 0, 799, 803, 3, 116, 50, 0, 800, 802, 3, 76, 30, 0, 801, 800, 1, 0, 0, 0, 802, 805, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 809, 3, 84, 34, 0, 809, 819, 1, 0, 0, 0, 810, 812, 3, 116, 50, 0, 811, 813, 3, 76, 30, 0, 812, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 3, 84, 34, 0, 817, 819, 1, 0, 0, 0, 818, 777, 1, 0, 0, 0, 818, 788, 1, 0, 0, 0, 818, 795, 1, 0, 0, 0, 818, 810, 1, 0, 0, 0, 819, 101, 1, 0, 0, 0, 820, 821, 7, 31, 0, 0, 821, 822, 7, 32, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 9, 0, 0, 825, 826, 7, 0, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 7, 12, 0, 0, 828, 829, 7, 2, 0, 0, 829, 830, 7, 4, 0, 0, 830, 107, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 109, 1, 0, 0, 0, 833, 834, 5, 58, 0, 0, 834, 835, 5, 58, 0, 0, 835, 111, 1, 0, 0, 0, 836, 837, 5, 44, 0, 0, 837, 113, 1, 0, 0, 0, 838, 839, 7, 0, 0, 0, 839, 840, 7, 3, 0, 0, 840, 841, 7, 2, 0, 0, 841, 842, 7, 4, 0, 0, 842, 115, 1, 0, 0, 0, 843, 844, 5, 46, 0, 0, 844, 117, 1, 0, 0, 0, 845, 846, 7, 15, 0, 0, 846, 847, 7, 12, 0, 0, 847, 848, 7, 13, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 3, 0, 0, 850, 119, 1, 0, 0, 0, 851, 852, 7, 15, 0, 0, 852, 853, 7, 1, 0, 0, 853, 854, 7, 6, 0, 0, 854, 855, 7, 2, 0, 0, 855, 856, 7, 5, 0, 0, 856, 121, 1, 0, 0, 0, 857, 858, 7, 1, 0, 0, 858, 859, 7, 9, 0, 0, 859, 123, 1, 0, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 2, 0, 0, 862, 125, 1, 0, 0, 0, 863, 864, 7, 13, 0, 0, 864, 865, 7, 12, 0, 0, 865, 866, 7, 2, 0, 0, 866, 867, 7, 5, 0, 0, 867, 127, 1, 0, 0, 0, 868, 869, 7, 13, 0, 0, 869, 870, 7, 1, 0, 0, 870, 871, 7, 18, 0, 0, 871, 872, 7, 3, 0, 0, 872, 129, 1, 0, 0, 0, 873, 874, 5, 40, 0, 0, 874, 131, 1, 0, 0, 0, 875, 876, 7, 9, 0, 0, 876, 877, 7, 7, 0, 0, 877, 878, 7, 5, 0, 0, 878, 133, 1, 0, 0, 0, 879, 880, 7, 9, 0, 0, 880, 881, 7, 20, 0, 0, 881, 882, 7, 13, 0, 0, 882, 883, 7, 13, 0, 0, 883, 135, 1, 0, 0, 0, 884, 885, 7, 9, 0, 0, 885, 886, 7, 20, 0, 0, 886, 887, 7, 13, 0, 0, 887, 888, 7, 13, 0, 0, 888, 889, 7, 2, 0, 0, 889, 137, 1, 0, 0, 0, 890, 891, 7, 7, 0, 0, 891, 892, 7, 6, 0, 0, 892, 139, 1, 0, 0, 0, 893, 894, 5, 63, 0, 0, 894, 141, 1, 0, 0, 0, 895, 896, 7, 6, 0, 0, 896, 897, 7, 13, 0, 0, 897, 898, 7, 1, 0, 0, 898, 899, 7, 18, 0, 0, 899, 900, 7, 3, 0, 0, 900, 143, 1, 0, 0, 0, 901, 902, 5, 41, 0, 0, 
902, 145, 1, 0, 0, 0, 903, 904, 7, 5, 0, 0, 904, 905, 7, 6, 0, 0, 905, 906, 7, 20, 0, 0, 906, 907, 7, 3, 0, 0, 907, 147, 1, 0, 0, 0, 908, 909, 5, 61, 0, 0, 909, 910, 5, 61, 0, 0, 910, 149, 1, 0, 0, 0, 911, 912, 5, 61, 0, 0, 912, 913, 5, 126, 0, 0, 913, 151, 1, 0, 0, 0, 914, 915, 5, 33, 0, 0, 915, 916, 5, 61, 0, 0, 916, 153, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 155, 1, 0, 0, 0, 919, 920, 5, 60, 0, 0, 920, 921, 5, 61, 0, 0, 921, 157, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 159, 1, 0, 0, 0, 924, 925, 5, 62, 0, 0, 925, 926, 5, 61, 0, 0, 926, 161, 1, 0, 0, 0, 927, 928, 5, 43, 0, 0, 928, 163, 1, 0, 0, 0, 929, 930, 5, 45, 0, 0, 930, 165, 1, 0, 0, 0, 931, 932, 5, 42, 0, 0, 932, 167, 1, 0, 0, 0, 933, 934, 5, 47, 0, 0, 934, 169, 1, 0, 0, 0, 935, 936, 5, 37, 0, 0, 936, 171, 1, 0, 0, 0, 937, 938, 4, 78, 8, 0, 938, 939, 3, 72, 28, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 78, 13, 0, 941, 173, 1, 0, 0, 0, 942, 943, 3, 46, 15, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 79, 14, 0, 945, 175, 1, 0, 0, 0, 946, 949, 3, 140, 62, 0, 947, 950, 3, 78, 31, 0, 948, 950, 3, 92, 38, 0, 949, 947, 1, 0, 0, 0, 949, 948, 1, 0, 0, 0, 950, 954, 1, 0, 0, 0, 951, 953, 3, 94, 39, 0, 952, 951, 1, 0, 0, 0, 953, 956, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 964, 1, 0, 0, 0, 956, 954, 1, 0, 0, 0, 957, 959, 3, 140, 62, 0, 958, 960, 3, 76, 30, 0, 959, 958, 1, 0, 0, 0, 960, 961, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 964, 1, 0, 0, 0, 963, 946, 1, 0, 0, 0, 963, 957, 1, 0, 0, 0, 964, 177, 1, 0, 0, 0, 965, 966, 5, 91, 0, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 81, 0, 0, 968, 969, 6, 81, 0, 0, 969, 179, 1, 0, 0, 0, 970, 971, 5, 93, 0, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 82, 12, 0, 973, 974, 6, 82, 12, 0, 974, 181, 1, 0, 0, 0, 975, 979, 3, 78, 31, 0, 976, 978, 3, 94, 39, 0, 977, 976, 1, 0, 0, 0, 978, 981, 1, 0, 0, 0, 979, 977, 1, 0, 0, 0, 979, 980, 1, 0, 0, 0, 980, 992, 1, 0, 0, 0, 981, 979, 1, 0, 0, 0, 982, 985, 3, 92, 38, 0, 983, 985, 3, 86, 35, 0, 984, 982, 1, 0, 0, 0, 984, 983, 1, 0, 0, 0, 985, 987, 1, 0, 0, 0, 986, 988, 3, 94, 39, 0, 987, 986, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 992, 1, 0, 0, 0, 991, 975, 1, 0, 0, 0, 991, 984, 1, 0, 0, 0, 992, 183, 1, 0, 0, 0, 993, 995, 3, 88, 36, 0, 994, 996, 3, 90, 37, 0, 995, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 995, 1, 0, 0, 0, 997, 998, 1, 0, 0, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 3, 88, 36, 0, 1000, 185, 1, 0, 0, 0, 1001, 1002, 3, 184, 84, 0, 1002, 187, 1, 0, 0, 0, 1003, 1004, 3, 66, 25, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 86, 11, 0, 1006, 189, 1, 0, 0, 0, 1007, 1008, 3, 68, 26, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 87, 11, 0, 1010, 191, 1, 0, 0, 0, 1011, 1012, 3, 70, 27, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 88, 11, 0, 1014, 193, 1, 0, 0, 0, 1015, 1016, 3, 178, 81, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 89, 15, 0, 1018, 1019, 6, 89, 16, 0, 1019, 195, 1, 0, 0, 0, 1020, 1021, 3, 74, 29, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 6, 90, 17, 0, 1023, 1024, 6, 90, 12, 0, 1024, 197, 1, 0, 0, 0, 1025, 1026, 3, 70, 27, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 91, 11, 0, 1028, 199, 1, 0, 0, 0, 1029, 1030, 3, 66, 25, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 92, 11, 0, 1032, 201, 1, 0, 0, 0, 1033, 1034, 3, 68, 26, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 93, 11, 0, 1036, 203, 1, 0, 0, 0, 1037, 1038, 3, 74, 29, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 94, 17, 0, 1040, 1041, 6, 94, 12, 0, 1041, 205, 1, 0, 0, 0, 1042, 1043, 3, 178, 81, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 95, 15, 0, 1045, 207, 1, 
0, 0, 0, 1046, 1047, 3, 180, 82, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 96, 18, 0, 1049, 209, 1, 0, 0, 0, 1050, 1051, 3, 72, 28, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 97, 13, 0, 1053, 211, 1, 0, 0, 0, 1054, 1055, 3, 112, 48, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 98, 19, 0, 1057, 213, 1, 0, 0, 0, 1058, 1059, 3, 108, 46, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 99, 20, 0, 1061, 215, 1, 0, 0, 0, 1062, 1063, 7, 16, 0, 0, 1063, 1064, 7, 3, 0, 0, 1064, 1065, 7, 5, 0, 0, 1065, 1066, 7, 12, 0, 0, 1066, 1067, 7, 0, 0, 0, 1067, 1068, 7, 12, 0, 0, 1068, 1069, 7, 5, 0, 0, 1069, 1070, 7, 12, 0, 0, 1070, 217, 1, 0, 0, 0, 1071, 1075, 8, 33, 0, 0, 1072, 1073, 5, 47, 0, 0, 1073, 1075, 8, 34, 0, 0, 1074, 1071, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1075, 219, 1, 0, 0, 0, 1076, 1078, 3, 218, 101, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 221, 1, 0, 0, 0, 1081, 1082, 3, 220, 102, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 103, 21, 0, 1084, 223, 1, 0, 0, 0, 1085, 1086, 3, 96, 40, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 104, 22, 0, 1088, 225, 1, 0, 0, 0, 1089, 1090, 3, 66, 25, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 105, 11, 0, 1092, 227, 1, 0, 0, 0, 1093, 1094, 3, 68, 26, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 106, 11, 0, 1096, 229, 1, 0, 0, 0, 1097, 1098, 3, 70, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 107, 11, 0, 1100, 231, 1, 0, 0, 0, 1101, 1102, 3, 74, 29, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 108, 17, 0, 1104, 1105, 6, 108, 12, 0, 1105, 233, 1, 0, 0, 0, 1106, 1107, 3, 116, 50, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 109, 23, 0, 1109, 235, 1, 0, 0, 0, 1110, 1111, 3, 112, 48, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 110, 19, 0, 1113, 237, 1, 0, 0, 0, 1114, 1115, 4, 111, 9, 0, 1115, 1116, 3, 140, 62, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 111, 24, 0, 1118, 239, 1, 0, 0, 0, 1119, 1120, 4, 112, 10, 0, 1120, 1121, 3, 176, 80, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 112, 25, 0, 1123, 241, 1, 0, 0, 0, 1124, 1129, 3, 78, 31, 0, 1125, 1129, 3, 76, 30, 0, 1126, 1129, 3, 92, 38, 0, 1127, 1129, 3, 166, 75, 0, 1128, 1124, 1, 0, 0, 0, 1128, 1125, 1, 0, 0, 0, 1128, 1126, 1, 0, 0, 0, 1128, 1127, 1, 0, 0, 0, 1129, 243, 1, 0, 0, 0, 1130, 1133, 3, 78, 31, 0, 1131, 1133, 3, 166, 75, 0, 1132, 1130, 1, 0, 0, 0, 1132, 1131, 1, 0, 0, 0, 1133, 1137, 1, 0, 0, 0, 1134, 1136, 3, 242, 113, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1139, 1, 0, 0, 0, 1137, 1135, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1150, 1, 0, 0, 0, 1139, 1137, 1, 0, 0, 0, 1140, 1143, 3, 92, 38, 0, 1141, 1143, 3, 86, 35, 0, 1142, 1140, 1, 0, 0, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1145, 1, 0, 0, 0, 1144, 1146, 3, 242, 113, 0, 1145, 1144, 1, 0, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1150, 1, 0, 0, 0, 1149, 1132, 1, 0, 0, 0, 1149, 1142, 1, 0, 0, 0, 1150, 245, 1, 0, 0, 0, 1151, 1154, 3, 244, 114, 0, 1152, 1154, 3, 184, 84, 0, 1153, 1151, 1, 0, 0, 0, 1153, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1153, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 247, 1, 0, 0, 0, 1157, 1158, 3, 66, 25, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 116, 11, 0, 1160, 249, 1, 0, 0, 0, 1161, 1162, 3, 68, 26, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 117, 11, 0, 1164, 251, 1, 0, 0, 0, 1165, 1166, 3, 70, 27, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 118, 11, 0, 1168, 253, 1, 0, 0, 0, 1169, 1170, 3, 74, 29, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 119, 17, 0, 1172, 1173, 6, 119, 12, 0, 1173, 255, 1, 0, 0, 0, 1174, 1175, 3, 108, 46, 0, 1175, 1176, 1, 0, 0, 0, 1176, 
1177, 6, 120, 20, 0, 1177, 257, 1, 0, 0, 0, 1178, 1179, 3, 112, 48, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 121, 19, 0, 1181, 259, 1, 0, 0, 0, 1182, 1183, 3, 116, 50, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 122, 23, 0, 1185, 261, 1, 0, 0, 0, 1186, 1187, 4, 123, 11, 0, 1187, 1188, 3, 140, 62, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 123, 24, 0, 1190, 263, 1, 0, 0, 0, 1191, 1192, 4, 124, 12, 0, 1192, 1193, 3, 176, 80, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 124, 25, 0, 1195, 265, 1, 0, 0, 0, 1196, 1197, 7, 12, 0, 0, 1197, 1198, 7, 2, 0, 0, 1198, 267, 1, 0, 0, 0, 1199, 1200, 3, 246, 115, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 126, 26, 0, 1202, 269, 1, 0, 0, 0, 1203, 1204, 3, 66, 25, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 127, 11, 0, 1206, 271, 1, 0, 0, 0, 1207, 1208, 3, 68, 26, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 128, 11, 0, 1210, 273, 1, 0, 0, 0, 1211, 1212, 3, 70, 27, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 129, 11, 0, 1214, 275, 1, 0, 0, 0, 1215, 1216, 3, 74, 29, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 130, 17, 0, 1218, 1219, 6, 130, 12, 0, 1219, 277, 1, 0, 0, 0, 1220, 1221, 3, 178, 81, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 131, 15, 0, 1223, 1224, 6, 131, 27, 0, 1224, 279, 1, 0, 0, 0, 1225, 1226, 7, 7, 0, 0, 1226, 1227, 7, 9, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 7, 19, 0, 0, 1231, 1232, 7, 1, 0, 0, 1232, 1233, 7, 5, 0, 0, 1233, 1234, 7, 10, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 8, 35, 0, 0, 1238, 285, 1, 0, 0, 0, 1239, 1241, 3, 284, 134, 0, 1240, 1239, 1, 0, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1240, 1, 0, 0, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 3, 72, 28, 0, 1245, 1247, 1, 0, 0, 0, 1246, 1240, 1, 0, 0, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1249, 1, 0, 0, 0, 1248, 1250, 3, 284, 134, 0, 1249, 1248, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1252, 1, 0, 0, 0, 1252, 287, 1, 0, 0, 0, 1253, 1254, 3, 286, 135, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 136, 29, 0, 1256, 289, 1, 0, 0, 0, 1257, 1258, 3, 66, 25, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 137, 11, 0, 1260, 291, 1, 0, 0, 0, 1261, 1262, 3, 68, 26, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 138, 11, 0, 1264, 293, 1, 0, 0, 0, 1265, 1266, 3, 70, 27, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 139, 11, 0, 1268, 295, 1, 0, 0, 0, 1269, 1270, 3, 74, 29, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 140, 17, 0, 1272, 1273, 6, 140, 12, 0, 1273, 1274, 6, 140, 12, 0, 1274, 297, 1, 0, 0, 0, 1275, 1276, 3, 108, 46, 0, 1276, 1277, 1, 0, 0, 0, 1277, 1278, 6, 141, 20, 0, 1278, 299, 1, 0, 0, 0, 1279, 1280, 3, 112, 48, 0, 1280, 1281, 1, 0, 0, 0, 1281, 1282, 6, 142, 19, 0, 1282, 301, 1, 0, 0, 0, 1283, 1284, 3, 116, 50, 0, 1284, 1285, 1, 0, 0, 0, 1285, 1286, 6, 143, 23, 0, 1286, 303, 1, 0, 0, 0, 1287, 1288, 3, 282, 133, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 144, 30, 0, 1290, 305, 1, 0, 0, 0, 1291, 1292, 3, 246, 115, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 145, 26, 0, 1294, 307, 1, 0, 0, 0, 1295, 1296, 3, 186, 85, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 146, 31, 0, 1298, 309, 1, 0, 0, 0, 1299, 1300, 4, 147, 13, 0, 1300, 1301, 3, 140, 62, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 147, 24, 0, 1303, 311, 1, 0, 0, 0, 1304, 1305, 4, 148, 14, 0, 1305, 1306, 3, 176, 80, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 148, 25, 0, 1308, 313, 1, 0, 0, 0, 1309, 1310, 3, 66, 25, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 149, 11, 0, 1312, 315, 1, 0, 0, 0, 1313, 1314, 3, 68, 26, 0, 1314, 1315, 1, 0, 
0, 0, 1315, 1316, 6, 150, 11, 0, 1316, 317, 1, 0, 0, 0, 1317, 1318, 3, 70, 27, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 151, 11, 0, 1320, 319, 1, 0, 0, 0, 1321, 1322, 3, 74, 29, 0, 1322, 1323, 1, 0, 0, 0, 1323, 1324, 6, 152, 17, 0, 1324, 1325, 6, 152, 12, 0, 1325, 321, 1, 0, 0, 0, 1326, 1327, 3, 116, 50, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 153, 23, 0, 1329, 323, 1, 0, 0, 0, 1330, 1331, 4, 154, 15, 0, 1331, 1332, 3, 140, 62, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 154, 24, 0, 1334, 325, 1, 0, 0, 0, 1335, 1336, 4, 155, 16, 0, 1336, 1337, 3, 176, 80, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 155, 25, 0, 1339, 327, 1, 0, 0, 0, 1340, 1341, 3, 186, 85, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 156, 31, 0, 1343, 329, 1, 0, 0, 0, 1344, 1345, 3, 182, 83, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 157, 32, 0, 1347, 331, 1, 0, 0, 0, 1348, 1349, 3, 66, 25, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 158, 11, 0, 1351, 333, 1, 0, 0, 0, 1352, 1353, 3, 68, 26, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 159, 11, 0, 1355, 335, 1, 0, 0, 0, 1356, 1357, 3, 70, 27, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 160, 11, 0, 1359, 337, 1, 0, 0, 0, 1360, 1361, 3, 74, 29, 0, 1361, 1362, 1, 0, 0, 0, 1362, 1363, 6, 161, 17, 0, 1363, 1364, 6, 161, 12, 0, 1364, 339, 1, 0, 0, 0, 1365, 1366, 7, 1, 0, 0, 1366, 1367, 7, 9, 0, 0, 1367, 1368, 7, 15, 0, 0, 1368, 1369, 7, 7, 0, 0, 1369, 341, 1, 0, 0, 0, 1370, 1371, 3, 66, 25, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 163, 11, 0, 1373, 343, 1, 0, 0, 0, 1374, 1375, 3, 68, 26, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 164, 11, 0, 1377, 345, 1, 0, 0, 0, 1378, 1379, 3, 70, 27, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 165, 11, 0, 1381, 347, 1, 0, 0, 0, 1382, 1383, 3, 180, 82, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 166, 18, 0, 1385, 1386, 6, 166, 12, 0, 1386, 349, 1, 0, 0, 0, 1387, 1388, 3, 72, 28, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 167, 13, 0, 1390, 351, 1, 0, 0, 0, 1391, 1397, 3, 86, 35, 0, 1392, 1397, 3, 76, 30, 0, 1393, 1397, 3, 116, 50, 0, 1394, 1397, 3, 78, 31, 0, 1395, 1397, 3, 92, 38, 0, 1396, 1391, 1, 0, 0, 0, 1396, 1392, 1, 0, 0, 0, 1396, 1393, 1, 0, 0, 0, 1396, 1394, 1, 0, 0, 0, 1396, 1395, 1, 0, 0, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1396, 1, 0, 0, 0, 1398, 1399, 1, 0, 0, 0, 1399, 353, 1, 0, 0, 0, 1400, 1401, 3, 66, 25, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 169, 11, 0, 1403, 355, 1, 0, 0, 0, 1404, 1405, 3, 68, 26, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 170, 11, 0, 1407, 357, 1, 0, 0, 0, 1408, 1409, 3, 70, 27, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 171, 11, 0, 1411, 359, 1, 0, 0, 0, 1412, 1413, 3, 74, 29, 0, 1413, 1414, 1, 0, 0, 0, 1414, 1415, 6, 172, 17, 0, 1415, 1416, 6, 172, 12, 0, 1416, 361, 1, 0, 0, 0, 1417, 1418, 3, 72, 28, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 173, 13, 0, 1420, 363, 1, 0, 0, 0, 1421, 1422, 3, 112, 48, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 174, 19, 0, 1424, 365, 1, 0, 0, 0, 1425, 1426, 3, 116, 50, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 175, 23, 0, 1428, 367, 1, 0, 0, 0, 1429, 1430, 3, 280, 132, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 176, 33, 0, 1432, 1433, 6, 176, 34, 0, 1433, 369, 1, 0, 0, 0, 1434, 1435, 3, 220, 102, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 177, 21, 0, 1437, 371, 1, 0, 0, 0, 1438, 1439, 3, 96, 40, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 178, 22, 0, 1441, 373, 1, 0, 0, 0, 1442, 1443, 3, 66, 25, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 179, 11, 0, 1445, 375, 1, 0, 0, 0, 1446, 1447, 3, 68, 26, 0, 1447, 1448, 1, 0, 0, 0, 1448, 1449, 6, 180, 11, 0, 1449, 377, 1, 0, 0, 0, 1450, 1451, 3, 70, 27, 0, 1451, 
1452, 1, 0, 0, 0, 1452, 1453, 6, 181, 11, 0, 1453, 379, 1, 0, 0, 0, 1454, 1455, 3, 74, 29, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 182, 17, 0, 1457, 1458, 6, 182, 12, 0, 1458, 1459, 6, 182, 12, 0, 1459, 381, 1, 0, 0, 0, 1460, 1461, 3, 112, 48, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 183, 19, 0, 1463, 383, 1, 0, 0, 0, 1464, 1465, 3, 116, 50, 0, 1465, 1466, 1, 0, 0, 0, 1466, 1467, 6, 184, 23, 0, 1467, 385, 1, 0, 0, 0, 1468, 1469, 3, 246, 115, 0, 1469, 1470, 1, 0, 0, 0, 1470, 1471, 6, 185, 26, 0, 1471, 387, 1, 0, 0, 0, 1472, 1473, 3, 66, 25, 0, 1473, 1474, 1, 0, 0, 0, 1474, 1475, 6, 186, 11, 0, 1475, 389, 1, 0, 0, 0, 1476, 1477, 3, 68, 26, 0, 1477, 1478, 1, 0, 0, 0, 1478, 1479, 6, 187, 11, 0, 1479, 391, 1, 0, 0, 0, 1480, 1481, 3, 70, 27, 0, 1481, 1482, 1, 0, 0, 0, 1482, 1483, 6, 188, 11, 0, 1483, 393, 1, 0, 0, 0, 1484, 1485, 3, 74, 29, 0, 1485, 1486, 1, 0, 0, 0, 1486, 1487, 6, 189, 17, 0, 1487, 1488, 6, 189, 12, 0, 1488, 395, 1, 0, 0, 0, 1489, 1490, 3, 54, 19, 0, 1490, 1491, 1, 0, 0, 0, 1491, 1492, 6, 190, 35, 0, 1492, 397, 1, 0, 0, 0, 1493, 1494, 3, 266, 125, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 191, 36, 0, 1496, 399, 1, 0, 0, 0, 1497, 1498, 3, 280, 132, 0, 1498, 1499, 1, 0, 0, 0, 1499, 1500, 6, 192, 33, 0, 1500, 1501, 6, 192, 12, 0, 1501, 1502, 6, 192, 0, 0, 1502, 401, 1, 0, 0, 0, 1503, 1504, 7, 20, 0, 0, 1504, 1505, 7, 2, 0, 0, 1505, 1506, 7, 1, 0, 0, 1506, 1507, 7, 9, 0, 0, 1507, 1508, 7, 17, 0, 0, 1508, 1509, 1, 0, 0, 0, 1509, 1510, 6, 193, 12, 0, 1510, 1511, 6, 193, 0, 0, 1511, 403, 1, 0, 0, 0, 1512, 1513, 3, 182, 83, 0, 1513, 1514, 1, 0, 0, 0, 1514, 1515, 6, 194, 32, 0, 1515, 405, 1, 0, 0, 0, 1516, 1517, 3, 186, 85, 0, 1517, 1518, 1, 0, 0, 0, 1518, 1519, 6, 195, 31, 0, 1519, 407, 1, 0, 0, 0, 1520, 1521, 3, 66, 25, 0, 1521, 1522, 1, 0, 0, 0, 1522, 1523, 6, 196, 11, 0, 1523, 409, 1, 0, 0, 0, 1524, 1525, 3, 68, 26, 0, 1525, 1526, 1, 0, 0, 0, 1526, 1527, 6, 197, 11, 0, 1527, 411, 1, 0, 0, 0, 1528, 1529, 3, 70, 27, 0, 1529, 1530, 1, 0, 0, 0, 1530, 1531, 6, 198, 11, 0, 1531, 413, 1, 0, 0, 0, 1532, 1533, 3, 74, 29, 0, 1533, 1534, 1, 0, 0, 0, 1534, 1535, 6, 199, 17, 0, 1535, 1536, 6, 199, 12, 0, 1536, 415, 1, 0, 0, 0, 1537, 1538, 3, 220, 102, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 6, 200, 21, 0, 1540, 1541, 6, 200, 12, 0, 1541, 1542, 6, 200, 37, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 96, 40, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 22, 0, 1546, 1547, 6, 201, 12, 0, 1547, 1548, 6, 201, 37, 0, 1548, 419, 1, 0, 0, 0, 1549, 1550, 3, 66, 25, 0, 1550, 1551, 1, 0, 0, 0, 1551, 1552, 6, 202, 11, 0, 1552, 421, 1, 0, 0, 0, 1553, 1554, 3, 68, 26, 0, 1554, 1555, 1, 0, 0, 0, 1555, 1556, 6, 203, 11, 0, 1556, 423, 1, 0, 0, 0, 1557, 1558, 3, 70, 27, 0, 1558, 1559, 1, 0, 0, 0, 1559, 1560, 6, 204, 11, 0, 1560, 425, 1, 0, 0, 0, 1561, 1562, 3, 72, 28, 0, 1562, 1563, 1, 0, 0, 0, 1563, 1564, 6, 205, 13, 0, 1564, 1565, 6, 205, 12, 0, 1565, 1566, 6, 205, 9, 0, 1566, 427, 1, 0, 0, 0, 1567, 1568, 3, 112, 48, 0, 1568, 1569, 1, 0, 0, 0, 1569, 1570, 6, 206, 19, 0, 1570, 1571, 6, 206, 12, 0, 1571, 1572, 6, 206, 9, 0, 1572, 429, 1, 0, 0, 0, 1573, 1574, 3, 66, 25, 0, 1574, 1575, 1, 0, 0, 0, 1575, 1576, 6, 207, 11, 0, 1576, 431, 1, 0, 0, 0, 1577, 1578, 3, 68, 26, 0, 1578, 1579, 1, 0, 0, 0, 1579, 1580, 6, 208, 11, 0, 1580, 433, 1, 0, 0, 0, 1581, 1582, 3, 70, 27, 0, 1582, 1583, 1, 0, 0, 0, 1583, 1584, 6, 209, 11, 0, 1584, 435, 1, 0, 0, 0, 1585, 1586, 3, 186, 85, 0, 1586, 1587, 1, 0, 0, 0, 1587, 1588, 6, 210, 12, 0, 1588, 1589, 6, 210, 0, 0, 1589, 1590, 6, 210, 31, 0, 1590, 437, 1, 0, 0, 0, 1591, 1592, 3, 182, 
83, 0, 1592, 1593, 1, 0, 0, 0, 1593, 1594, 6, 211, 12, 0, 1594, 1595, 6, 211, 0, 0, 1595, 1596, 6, 211, 32, 0, 1596, 439, 1, 0, 0, 0, 1597, 1598, 3, 102, 43, 0, 1598, 1599, 1, 0, 0, 0, 1599, 1600, 6, 212, 12, 0, 1600, 1601, 6, 212, 0, 0, 1601, 1602, 6, 212, 38, 0, 1602, 441, 1, 0, 0, 0, 1603, 1604, 3, 74, 29, 0, 1604, 1605, 1, 0, 0, 0, 1605, 1606, 6, 213, 17, 0, 1606, 1607, 6, 213, 12, 0, 1607, 443, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 657, 667, 671, 674, 683, 685, 696, 717, 722, 731, 738, 743, 745, 756, 764, 767, 769, 774, 779, 785, 792, 797, 803, 806, 814, 818, 949, 954, 961, 963, 979, 984, 989, 991, 997, 1074, 1079, 1128, 1132, 1137, 1142, 1147, 1149, 1153, 1155, 1242, 1246, 1251, 1396, 1398, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 29, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 30, 0, 7, 71, 0, 7, 39, 0, 7, 37, 0, 7, 81, 0, 7, 31, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 34, 0] \ No newline at end of file +[4, 0, 128, 1601, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 
2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 654, 8, 24, 11, 24, 12, 24, 655, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 664, 8, 25, 10, 25, 12, 25, 667, 9, 25, 1, 25, 3, 25, 670, 8, 25, 1, 25, 3, 25, 673, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 682, 8, 26, 10, 26, 12, 26, 685, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 693, 8, 27, 11, 27, 12, 27, 694, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 714, 8, 33, 1, 33, 4, 33, 717, 8, 33, 11, 33, 12, 33, 718, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 728, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 735, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 740, 8, 39, 10, 39, 12, 39, 743, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 751, 8, 39, 10, 39, 12, 39, 754, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 761, 8, 39, 1, 39, 3, 39, 764, 8, 39, 3, 39, 766, 8, 39, 1, 40, 4, 40, 769, 8, 40, 11, 40, 12, 40, 770, 1, 41, 4, 41, 774, 8, 41, 11, 41, 12, 41, 775, 1, 41, 1, 41, 5, 41, 780, 8, 41, 10, 41, 12, 41, 783, 9, 41, 1, 41, 1, 41, 4, 41, 787, 8, 41, 11, 41, 12, 41, 788, 1, 41, 4, 41, 792, 8, 41, 11, 41, 12, 41, 793, 1, 41, 1, 41, 5, 41, 798, 8, 41, 10, 41, 12, 41, 801, 9, 41, 3, 41, 
803, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 809, 8, 41, 11, 41, 12, 41, 810, 1, 41, 1, 41, 3, 41, 815, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 3, 79, 943, 8, 79, 1, 79, 5, 79, 946, 8, 79, 10, 79, 12, 79, 949, 9, 79, 1, 79, 1, 79, 4, 79, 953, 8, 79, 11, 79, 12, 79, 954, 3, 79, 957, 8, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 5, 82, 971, 8, 82, 10, 82, 12, 82, 974, 9, 82, 1, 82, 1, 82, 3, 82, 978, 8, 82, 1, 82, 4, 82, 981, 8, 82, 11, 82, 12, 82, 982, 3, 82, 985, 8, 82, 1, 83, 1, 83, 4, 83, 989, 8, 83, 11, 83, 12, 83, 990, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 3, 100, 1068, 8, 100, 1, 101, 4, 101, 1071, 8, 101, 11, 101, 12, 101, 1072, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 1122, 8, 112, 1, 113, 1, 113, 3, 113, 1126, 8, 113, 1, 113, 5, 113, 1129, 8, 113, 10, 113, 12, 113, 1132, 9, 113, 1, 113, 1, 113, 3, 113, 1136, 8, 113, 1, 113, 4, 113, 1139, 8, 113, 11, 113, 12, 113, 1140, 3, 113, 1143, 8, 113, 1, 114, 1, 114, 4, 114, 1147, 8, 114, 11, 114, 12, 114, 1148, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 4, 134, 1234, 8, 134, 11, 134, 12, 134, 1235, 1, 134, 1, 134, 3, 134, 1240, 8, 134, 1, 134, 4, 134, 1243, 8, 134, 11, 134, 12, 134, 1244, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 
1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 4, 167, 1390, 8, 167, 11, 167, 12, 167, 1391, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 2, 683, 752, 0, 213, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 69, 176, 70, 178, 71, 180, 72, 182, 0, 184, 73, 186, 74, 188, 75, 190, 
76, 192, 0, 194, 0, 196, 77, 198, 78, 200, 79, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 80, 216, 0, 218, 81, 220, 0, 222, 0, 224, 82, 226, 83, 228, 84, 230, 0, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 88, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 89, 266, 0, 268, 90, 270, 91, 272, 92, 274, 0, 276, 0, 278, 93, 280, 94, 282, 0, 284, 95, 286, 0, 288, 96, 290, 97, 292, 98, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 99, 314, 100, 316, 101, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 102, 332, 103, 334, 104, 336, 0, 338, 105, 340, 106, 342, 107, 344, 108, 346, 0, 348, 0, 350, 109, 352, 110, 354, 111, 356, 112, 358, 0, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 113, 374, 114, 376, 115, 378, 0, 380, 0, 382, 0, 384, 0, 386, 116, 388, 117, 390, 118, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 0, 404, 0, 406, 120, 408, 121, 410, 122, 412, 0, 414, 0, 416, 0, 418, 123, 420, 124, 422, 125, 424, 0, 426, 0, 428, 126, 430, 127, 432, 128, 434, 0, 436, 0, 438, 0, 440, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1628, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 
0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 244, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 8, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 9, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 10, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 11, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 12, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 13, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 14, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 15, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 16, 442, 1, 0, 0, 0, 18, 452, 1, 0, 0, 0, 20, 459, 1, 0, 0, 0, 22, 468, 1, 0, 0, 0, 24, 475, 1, 0, 0, 0, 26, 485, 1, 0, 0, 0, 28, 492, 1, 0, 0, 0, 30, 499, 1, 0, 0, 0, 32, 506, 1, 0, 0, 0, 34, 514, 1, 0, 0, 0, 36, 526, 1, 0, 0, 0, 38, 535, 1, 0, 0, 0, 40, 541, 1, 0, 0, 0, 42, 548, 1, 0, 0, 0, 44, 555, 1, 0, 0, 0, 46, 563, 1, 0, 0, 0, 48, 571, 1, 0, 0, 0, 50, 586, 1, 0, 0, 0, 52, 598, 1, 0, 0, 0, 54, 609, 1, 0, 0, 0, 56, 617, 1, 0, 0, 0, 58, 625, 1, 0, 0, 0, 60, 633, 1, 0, 0, 0, 62, 642, 1, 0, 0, 0, 64, 653, 1, 0, 0, 0, 66, 659, 1, 0, 0, 0, 68, 676, 1, 0, 0, 0, 70, 692, 1, 0, 0, 0, 72, 698, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 704, 1, 0, 0, 0, 78, 706, 1, 0, 0, 0, 80, 709, 1, 0, 0, 0, 82, 711, 1, 0, 0, 0, 84, 720, 1, 0, 0, 0, 86, 722, 1, 0, 0, 0, 88, 727, 1, 0, 0, 0, 90, 729, 1, 0, 0, 0, 92, 734, 1, 0, 0, 0, 94, 765, 1, 0, 0, 0, 96, 768, 1, 0, 0, 0, 98, 814, 1, 0, 0, 0, 100, 816, 1, 0, 0, 0, 
102, 819, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 829, 1, 0, 0, 0, 110, 832, 1, 0, 0, 0, 112, 834, 1, 0, 0, 0, 114, 836, 1, 0, 0, 0, 116, 841, 1, 0, 0, 0, 118, 843, 1, 0, 0, 0, 120, 849, 1, 0, 0, 0, 122, 855, 1, 0, 0, 0, 124, 858, 1, 0, 0, 0, 126, 861, 1, 0, 0, 0, 128, 866, 1, 0, 0, 0, 130, 871, 1, 0, 0, 0, 132, 873, 1, 0, 0, 0, 134, 877, 1, 0, 0, 0, 136, 882, 1, 0, 0, 0, 138, 888, 1, 0, 0, 0, 140, 891, 1, 0, 0, 0, 142, 893, 1, 0, 0, 0, 144, 899, 1, 0, 0, 0, 146, 901, 1, 0, 0, 0, 148, 906, 1, 0, 0, 0, 150, 909, 1, 0, 0, 0, 152, 912, 1, 0, 0, 0, 154, 915, 1, 0, 0, 0, 156, 917, 1, 0, 0, 0, 158, 920, 1, 0, 0, 0, 160, 922, 1, 0, 0, 0, 162, 925, 1, 0, 0, 0, 164, 927, 1, 0, 0, 0, 166, 929, 1, 0, 0, 0, 168, 931, 1, 0, 0, 0, 170, 933, 1, 0, 0, 0, 172, 935, 1, 0, 0, 0, 174, 956, 1, 0, 0, 0, 176, 958, 1, 0, 0, 0, 178, 963, 1, 0, 0, 0, 180, 984, 1, 0, 0, 0, 182, 986, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1000, 1, 0, 0, 0, 190, 1004, 1, 0, 0, 0, 192, 1008, 1, 0, 0, 0, 194, 1013, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1022, 1, 0, 0, 0, 200, 1026, 1, 0, 0, 0, 202, 1030, 1, 0, 0, 0, 204, 1035, 1, 0, 0, 0, 206, 1039, 1, 0, 0, 0, 208, 1043, 1, 0, 0, 0, 210, 1047, 1, 0, 0, 0, 212, 1051, 1, 0, 0, 0, 214, 1055, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1074, 1, 0, 0, 0, 222, 1078, 1, 0, 0, 0, 224, 1082, 1, 0, 0, 0, 226, 1086, 1, 0, 0, 0, 228, 1090, 1, 0, 0, 0, 230, 1094, 1, 0, 0, 0, 232, 1099, 1, 0, 0, 0, 234, 1103, 1, 0, 0, 0, 236, 1107, 1, 0, 0, 0, 238, 1112, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1142, 1, 0, 0, 0, 244, 1146, 1, 0, 0, 0, 246, 1150, 1, 0, 0, 0, 248, 1154, 1, 0, 0, 0, 250, 1158, 1, 0, 0, 0, 252, 1162, 1, 0, 0, 0, 254, 1167, 1, 0, 0, 0, 256, 1171, 1, 0, 0, 0, 258, 1175, 1, 0, 0, 0, 260, 1179, 1, 0, 0, 0, 262, 1184, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1196, 1, 0, 0, 0, 270, 1200, 1, 0, 0, 0, 272, 1204, 1, 0, 0, 0, 274, 1208, 1, 0, 0, 0, 276, 1213, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1250, 1, 0, 0, 0, 290, 1254, 1, 0, 0, 0, 292, 1258, 1, 0, 0, 0, 294, 1262, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1276, 1, 0, 0, 0, 302, 1280, 1, 0, 0, 0, 304, 1284, 1, 0, 0, 0, 306, 1288, 1, 0, 0, 0, 308, 1292, 1, 0, 0, 0, 310, 1297, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1306, 1, 0, 0, 0, 316, 1310, 1, 0, 0, 0, 318, 1314, 1, 0, 0, 0, 320, 1319, 1, 0, 0, 0, 322, 1323, 1, 0, 0, 0, 324, 1328, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1337, 1, 0, 0, 0, 330, 1341, 1, 0, 0, 0, 332, 1345, 1, 0, 0, 0, 334, 1349, 1, 0, 0, 0, 336, 1353, 1, 0, 0, 0, 338, 1358, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1367, 1, 0, 0, 0, 344, 1371, 1, 0, 0, 0, 346, 1375, 1, 0, 0, 0, 348, 1380, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1393, 1, 0, 0, 0, 354, 1397, 1, 0, 0, 0, 356, 1401, 1, 0, 0, 0, 358, 1405, 1, 0, 0, 0, 360, 1410, 1, 0, 0, 0, 362, 1414, 1, 0, 0, 0, 364, 1418, 1, 0, 0, 0, 366, 1422, 1, 0, 0, 0, 368, 1427, 1, 0, 0, 0, 370, 1431, 1, 0, 0, 0, 372, 1435, 1, 0, 0, 0, 374, 1439, 1, 0, 0, 0, 376, 1443, 1, 0, 0, 0, 378, 1447, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1461, 1, 0, 0, 0, 386, 1465, 1, 0, 0, 0, 388, 1469, 1, 0, 0, 0, 390, 1473, 1, 0, 0, 0, 392, 1477, 1, 0, 0, 0, 394, 1482, 1, 0, 0, 0, 396, 1486, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1505, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 
1530, 1, 0, 0, 0, 416, 1536, 1, 0, 0, 0, 418, 1542, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1550, 1, 0, 0, 0, 424, 1554, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1566, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1574, 1, 0, 0, 0, 434, 1578, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1590, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 443, 7, 0, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 2, 0, 0, 445, 446, 7, 2, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 4, 0, 0, 448, 449, 7, 5, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 0, 0, 0, 451, 17, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 1, 1, 0, 458, 19, 1, 0, 0, 0, 459, 460, 7, 3, 0, 0, 460, 461, 7, 9, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 4, 0, 0, 464, 465, 7, 10, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 2, 2, 0, 467, 21, 1, 0, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 11, 0, 0, 470, 471, 7, 12, 0, 0, 471, 472, 7, 13, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 3, 0, 0, 474, 23, 1, 0, 0, 0, 475, 476, 7, 3, 0, 0, 476, 477, 7, 14, 0, 0, 477, 478, 7, 8, 0, 0, 478, 479, 7, 13, 0, 0, 479, 480, 7, 12, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 4, 3, 0, 484, 25, 1, 0, 0, 0, 485, 486, 7, 15, 0, 0, 486, 487, 7, 6, 0, 0, 487, 488, 7, 7, 0, 0, 488, 489, 7, 16, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 5, 4, 0, 491, 27, 1, 0, 0, 0, 492, 493, 7, 17, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 7, 0, 0, 495, 496, 7, 18, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 6, 0, 0, 498, 29, 1, 0, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 3, 0, 0, 502, 503, 7, 8, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 7, 1, 0, 505, 31, 1, 0, 0, 0, 506, 507, 7, 13, 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 16, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 5, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 8, 0, 0, 513, 33, 1, 0, 0, 0, 514, 515, 7, 16, 0, 0, 515, 516, 7, 11, 0, 0, 516, 517, 5, 95, 0, 0, 517, 518, 7, 3, 0, 0, 518, 519, 7, 14, 0, 0, 519, 520, 7, 8, 0, 0, 520, 521, 7, 12, 0, 0, 521, 522, 7, 9, 0, 0, 522, 523, 7, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 9, 5, 0, 525, 35, 1, 0, 0, 0, 526, 527, 7, 6, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 9, 0, 0, 529, 530, 7, 12, 0, 0, 530, 531, 7, 16, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 10, 6, 0, 534, 37, 1, 0, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 7, 0, 0, 537, 538, 7, 19, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 6, 11, 0, 0, 540, 39, 1, 0, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, 7, 10, 0, 0, 543, 544, 7, 7, 0, 0, 544, 545, 7, 19, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 12, 7, 0, 547, 41, 1, 0, 0, 0, 548, 549, 7, 2, 0, 0, 549, 550, 7, 7, 0, 0, 550, 551, 7, 6, 0, 0, 551, 552, 7, 5, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 13, 0, 0, 554, 43, 1, 0, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 7, 5, 0, 0, 557, 558, 7, 12, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 14, 0, 0, 562, 45, 1, 0, 0, 0, 563, 564, 7, 19, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 15, 0, 0, 570, 47, 1, 0, 0, 0, 571, 572, 4, 16, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 9, 0, 0, 574, 575, 7, 13, 0, 0, 575, 576, 7, 1, 0, 0, 576, 577, 7, 9, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 7, 2, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 12, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 16, 0, 0, 585, 49, 1, 0, 0, 0, 586, 587, 4, 17, 1, 0, 587, 588, 7, 13, 0, 0, 
588, 589, 7, 7, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 18, 0, 0, 591, 592, 7, 20, 0, 0, 592, 593, 7, 8, 0, 0, 593, 594, 5, 95, 0, 0, 594, 595, 5, 128020, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 17, 8, 0, 597, 51, 1, 0, 0, 0, 598, 599, 4, 18, 2, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 3, 0, 0, 601, 602, 7, 5, 0, 0, 602, 603, 7, 6, 0, 0, 603, 604, 7, 1, 0, 0, 604, 605, 7, 4, 0, 0, 605, 606, 7, 2, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 18, 9, 0, 608, 53, 1, 0, 0, 0, 609, 610, 4, 19, 3, 0, 610, 611, 7, 21, 0, 0, 611, 612, 7, 7, 0, 0, 612, 613, 7, 1, 0, 0, 613, 614, 7, 9, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 19, 10, 0, 616, 55, 1, 0, 0, 0, 617, 618, 4, 20, 4, 0, 618, 619, 7, 15, 0, 0, 619, 620, 7, 20, 0, 0, 620, 621, 7, 13, 0, 0, 621, 622, 7, 13, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 20, 10, 0, 624, 57, 1, 0, 0, 0, 625, 626, 4, 21, 5, 0, 626, 627, 7, 13, 0, 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 5, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 21, 10, 0, 632, 59, 1, 0, 0, 0, 633, 634, 4, 22, 6, 0, 634, 635, 7, 6, 0, 0, 635, 636, 7, 1, 0, 0, 636, 637, 7, 17, 0, 0, 637, 638, 7, 10, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 22, 10, 0, 641, 61, 1, 0, 0, 0, 642, 643, 4, 23, 7, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 23, 10, 0, 651, 63, 1, 0, 0, 0, 652, 654, 8, 22, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 24, 0, 0, 658, 65, 1, 0, 0, 0, 659, 660, 5, 47, 0, 0, 660, 661, 5, 47, 0, 0, 661, 665, 1, 0, 0, 0, 662, 664, 8, 23, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 670, 5, 13, 0, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 5, 10, 0, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 25, 11, 0, 675, 67, 1, 0, 0, 0, 676, 677, 5, 47, 0, 0, 677, 678, 5, 42, 0, 0, 678, 683, 1, 0, 0, 0, 679, 682, 3, 68, 26, 0, 680, 682, 9, 0, 0, 0, 681, 679, 1, 0, 0, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 684, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 687, 5, 42, 0, 0, 687, 688, 5, 47, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 26, 11, 0, 690, 69, 1, 0, 0, 0, 691, 693, 7, 24, 0, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 697, 6, 27, 11, 0, 697, 71, 1, 0, 0, 0, 698, 699, 5, 124, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 28, 12, 0, 701, 73, 1, 0, 0, 0, 702, 703, 7, 25, 0, 0, 703, 75, 1, 0, 0, 0, 704, 705, 7, 26, 0, 0, 705, 77, 1, 0, 0, 0, 706, 707, 5, 92, 0, 0, 707, 708, 7, 27, 0, 0, 708, 79, 1, 0, 0, 0, 709, 710, 8, 28, 0, 0, 710, 81, 1, 0, 0, 0, 711, 713, 7, 3, 0, 0, 712, 714, 7, 29, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 3, 74, 29, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 83, 1, 0, 0, 0, 720, 721, 5, 64, 0, 0, 721, 85, 1, 0, 0, 0, 722, 723, 5, 96, 0, 0, 723, 87, 1, 0, 0, 0, 724, 728, 8, 30, 0, 0, 725, 726, 5, 96, 0, 0, 726, 728, 5, 96, 0, 0, 727, 724, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 89, 1, 0, 0, 0, 729, 730, 5, 95, 0, 0, 730, 91, 1, 0, 0, 0, 731, 735, 3, 76, 30, 0, 732, 735, 3, 74, 29, 0, 733, 735, 3, 90, 37, 0, 734, 731, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 733, 1, 0, 
0, 0, 735, 93, 1, 0, 0, 0, 736, 741, 5, 34, 0, 0, 737, 740, 3, 78, 31, 0, 738, 740, 3, 80, 32, 0, 739, 737, 1, 0, 0, 0, 739, 738, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 744, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 744, 766, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 748, 5, 34, 0, 0, 748, 752, 1, 0, 0, 0, 749, 751, 8, 23, 0, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 753, 755, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 760, 1, 0, 0, 0, 759, 761, 5, 34, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 764, 5, 34, 0, 0, 763, 762, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 736, 1, 0, 0, 0, 765, 745, 1, 0, 0, 0, 766, 95, 1, 0, 0, 0, 767, 769, 3, 74, 29, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 97, 1, 0, 0, 0, 772, 774, 3, 74, 29, 0, 773, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 781, 3, 116, 50, 0, 778, 780, 3, 74, 29, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 815, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 786, 3, 116, 50, 0, 785, 787, 3, 74, 29, 0, 786, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 815, 1, 0, 0, 0, 790, 792, 3, 74, 29, 0, 791, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 802, 1, 0, 0, 0, 795, 799, 3, 116, 50, 0, 796, 798, 3, 74, 29, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 3, 82, 33, 0, 805, 815, 1, 0, 0, 0, 806, 808, 3, 116, 50, 0, 807, 809, 3, 74, 29, 0, 808, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 3, 82, 33, 0, 813, 815, 1, 0, 0, 0, 814, 773, 1, 0, 0, 0, 814, 784, 1, 0, 0, 0, 814, 791, 1, 0, 0, 0, 814, 806, 1, 0, 0, 0, 815, 99, 1, 0, 0, 0, 816, 817, 7, 31, 0, 0, 817, 818, 7, 32, 0, 0, 818, 101, 1, 0, 0, 0, 819, 820, 7, 12, 0, 0, 820, 821, 7, 9, 0, 0, 821, 822, 7, 0, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 4, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 5, 61, 0, 0, 828, 107, 1, 0, 0, 0, 829, 830, 5, 58, 0, 0, 830, 831, 5, 58, 0, 0, 831, 109, 1, 0, 0, 0, 832, 833, 5, 58, 0, 0, 833, 111, 1, 0, 0, 0, 834, 835, 5, 44, 0, 0, 835, 113, 1, 0, 0, 0, 836, 837, 7, 0, 0, 0, 837, 838, 7, 3, 0, 0, 838, 839, 7, 2, 0, 0, 839, 840, 7, 4, 0, 0, 840, 115, 1, 0, 0, 0, 841, 842, 5, 46, 0, 0, 842, 117, 1, 0, 0, 0, 843, 844, 7, 15, 0, 0, 844, 845, 7, 12, 0, 0, 845, 846, 7, 13, 0, 0, 846, 847, 7, 2, 0, 0, 847, 848, 7, 3, 0, 0, 848, 119, 1, 0, 0, 0, 849, 850, 7, 15, 0, 0, 850, 851, 7, 1, 0, 0, 851, 852, 7, 6, 0, 0, 852, 853, 7, 2, 0, 0, 853, 854, 7, 5, 0, 0, 854, 121, 1, 0, 0, 0, 855, 856, 7, 1, 0, 0, 856, 857, 7, 9, 0, 0, 857, 123, 1, 0, 0, 0, 858, 859, 7, 1, 0, 0, 859, 860, 7, 2, 0, 0, 860, 125, 1, 0, 0, 0, 861, 862, 7, 13, 0, 0, 862, 863, 7, 12, 0, 0, 863, 864, 7, 2, 0, 0, 864, 865, 7, 5, 0, 0, 865, 127, 1, 0, 0, 0, 866, 867, 7, 13, 0, 0, 867, 868, 7, 1, 0, 0, 868, 869, 7, 18, 0, 0, 869, 870, 7, 3, 0, 0, 870, 129, 1, 0, 0, 0, 871, 872, 5, 40, 0, 0, 872, 131, 1, 0, 0, 0, 873, 874, 7, 9, 0, 0, 874, 875, 7, 7, 0, 0, 875, 876, 7, 5, 0, 0, 876, 133, 1, 
0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 20, 0, 0, 879, 880, 7, 13, 0, 0, 880, 881, 7, 13, 0, 0, 881, 135, 1, 0, 0, 0, 882, 883, 7, 9, 0, 0, 883, 884, 7, 20, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 2, 0, 0, 887, 137, 1, 0, 0, 0, 888, 889, 7, 7, 0, 0, 889, 890, 7, 6, 0, 0, 890, 139, 1, 0, 0, 0, 891, 892, 5, 63, 0, 0, 892, 141, 1, 0, 0, 0, 893, 894, 7, 6, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 1, 0, 0, 896, 897, 7, 18, 0, 0, 897, 898, 7, 3, 0, 0, 898, 143, 1, 0, 0, 0, 899, 900, 5, 41, 0, 0, 900, 145, 1, 0, 0, 0, 901, 902, 7, 5, 0, 0, 902, 903, 7, 6, 0, 0, 903, 904, 7, 20, 0, 0, 904, 905, 7, 3, 0, 0, 905, 147, 1, 0, 0, 0, 906, 907, 5, 61, 0, 0, 907, 908, 5, 61, 0, 0, 908, 149, 1, 0, 0, 0, 909, 910, 5, 61, 0, 0, 910, 911, 5, 126, 0, 0, 911, 151, 1, 0, 0, 0, 912, 913, 5, 33, 0, 0, 913, 914, 5, 61, 0, 0, 914, 153, 1, 0, 0, 0, 915, 916, 5, 60, 0, 0, 916, 155, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 919, 5, 61, 0, 0, 919, 157, 1, 0, 0, 0, 920, 921, 5, 62, 0, 0, 921, 159, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 924, 5, 61, 0, 0, 924, 161, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 163, 1, 0, 0, 0, 927, 928, 5, 45, 0, 0, 928, 165, 1, 0, 0, 0, 929, 930, 5, 42, 0, 0, 930, 167, 1, 0, 0, 0, 931, 932, 5, 47, 0, 0, 932, 169, 1, 0, 0, 0, 933, 934, 5, 37, 0, 0, 934, 171, 1, 0, 0, 0, 935, 936, 3, 46, 15, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 78, 13, 0, 938, 173, 1, 0, 0, 0, 939, 942, 3, 140, 62, 0, 940, 943, 3, 76, 30, 0, 941, 943, 3, 90, 37, 0, 942, 940, 1, 0, 0, 0, 942, 941, 1, 0, 0, 0, 943, 947, 1, 0, 0, 0, 944, 946, 3, 92, 38, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 957, 1, 0, 0, 0, 949, 947, 1, 0, 0, 0, 950, 952, 3, 140, 62, 0, 951, 953, 3, 74, 29, 0, 952, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 957, 1, 0, 0, 0, 956, 939, 1, 0, 0, 0, 956, 950, 1, 0, 0, 0, 957, 175, 1, 0, 0, 0, 958, 959, 5, 91, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 80, 0, 0, 961, 962, 6, 80, 0, 0, 962, 177, 1, 0, 0, 0, 963, 964, 5, 93, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 81, 12, 0, 966, 967, 6, 81, 12, 0, 967, 179, 1, 0, 0, 0, 968, 972, 3, 76, 30, 0, 969, 971, 3, 92, 38, 0, 970, 969, 1, 0, 0, 0, 971, 974, 1, 0, 0, 0, 972, 970, 1, 0, 0, 0, 972, 973, 1, 0, 0, 0, 973, 985, 1, 0, 0, 0, 974, 972, 1, 0, 0, 0, 975, 978, 3, 90, 37, 0, 976, 978, 3, 84, 34, 0, 977, 975, 1, 0, 0, 0, 977, 976, 1, 0, 0, 0, 978, 980, 1, 0, 0, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 968, 1, 0, 0, 0, 984, 977, 1, 0, 0, 0, 985, 181, 1, 0, 0, 0, 986, 988, 3, 86, 35, 0, 987, 989, 3, 88, 36, 0, 988, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 3, 86, 35, 0, 993, 183, 1, 0, 0, 0, 994, 995, 3, 182, 83, 0, 995, 185, 1, 0, 0, 0, 996, 997, 3, 66, 25, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 85, 11, 0, 999, 187, 1, 0, 0, 0, 1000, 1001, 3, 68, 26, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 86, 11, 0, 1003, 189, 1, 0, 0, 0, 1004, 1005, 3, 70, 27, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 87, 11, 0, 1007, 191, 1, 0, 0, 0, 1008, 1009, 3, 176, 80, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 88, 14, 0, 1011, 1012, 6, 88, 15, 0, 1012, 193, 1, 0, 0, 0, 1013, 1014, 3, 72, 28, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 89, 16, 0, 1016, 1017, 6, 89, 12, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 11, 0, 1021, 197, 1, 0, 0, 0, 1022, 1023, 3, 
66, 25, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 91, 11, 0, 1025, 199, 1, 0, 0, 0, 1026, 1027, 3, 68, 26, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 92, 11, 0, 1029, 201, 1, 0, 0, 0, 1030, 1031, 3, 72, 28, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 93, 16, 0, 1033, 1034, 6, 93, 12, 0, 1034, 203, 1, 0, 0, 0, 1035, 1036, 3, 176, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 94, 14, 0, 1038, 205, 1, 0, 0, 0, 1039, 1040, 3, 178, 81, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 95, 17, 0, 1042, 207, 1, 0, 0, 0, 1043, 1044, 3, 110, 47, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 96, 18, 0, 1046, 209, 1, 0, 0, 0, 1047, 1048, 3, 112, 48, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 97, 19, 0, 1050, 211, 1, 0, 0, 0, 1051, 1052, 3, 106, 45, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 98, 20, 0, 1054, 213, 1, 0, 0, 0, 1055, 1056, 7, 16, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 7, 5, 0, 0, 1058, 1059, 7, 12, 0, 0, 1059, 1060, 7, 0, 0, 0, 1060, 1061, 7, 12, 0, 0, 1061, 1062, 7, 5, 0, 0, 1062, 1063, 7, 12, 0, 0, 1063, 215, 1, 0, 0, 0, 1064, 1068, 8, 33, 0, 0, 1065, 1066, 5, 47, 0, 0, 1066, 1068, 8, 34, 0, 0, 1067, 1064, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 217, 1, 0, 0, 0, 1069, 1071, 3, 216, 100, 0, 1070, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1075, 3, 218, 101, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 102, 21, 0, 1077, 221, 1, 0, 0, 0, 1078, 1079, 3, 94, 39, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 103, 22, 0, 1081, 223, 1, 0, 0, 0, 1082, 1083, 3, 66, 25, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 104, 11, 0, 1085, 225, 1, 0, 0, 0, 1086, 1087, 3, 68, 26, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 105, 11, 0, 1089, 227, 1, 0, 0, 0, 1090, 1091, 3, 70, 27, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 106, 11, 0, 1093, 229, 1, 0, 0, 0, 1094, 1095, 3, 72, 28, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 107, 16, 0, 1097, 1098, 6, 107, 12, 0, 1098, 231, 1, 0, 0, 0, 1099, 1100, 3, 116, 50, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 108, 23, 0, 1102, 233, 1, 0, 0, 0, 1103, 1104, 3, 112, 48, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 109, 19, 0, 1106, 235, 1, 0, 0, 0, 1107, 1108, 4, 110, 8, 0, 1108, 1109, 3, 140, 62, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 110, 24, 0, 1111, 237, 1, 0, 0, 0, 1112, 1113, 4, 111, 9, 0, 1113, 1114, 3, 174, 79, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 25, 0, 1116, 239, 1, 0, 0, 0, 1117, 1122, 3, 76, 30, 0, 1118, 1122, 3, 74, 29, 0, 1119, 1122, 3, 90, 37, 0, 1120, 1122, 3, 166, 75, 0, 1121, 1117, 1, 0, 0, 0, 1121, 1118, 1, 0, 0, 0, 1121, 1119, 1, 0, 0, 0, 1121, 1120, 1, 0, 0, 0, 1122, 241, 1, 0, 0, 0, 1123, 1126, 3, 76, 30, 0, 1124, 1126, 3, 166, 75, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1130, 1, 0, 0, 0, 1127, 1129, 3, 240, 112, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1132, 1, 0, 0, 0, 1130, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1143, 1, 0, 0, 0, 1132, 1130, 1, 0, 0, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 84, 34, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 240, 112, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1143, 1, 0, 0, 0, 1142, 1125, 1, 0, 0, 0, 1142, 1135, 1, 0, 0, 0, 1143, 243, 1, 0, 0, 0, 1144, 1147, 3, 242, 113, 0, 1145, 1147, 3, 182, 83, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 245, 1, 0, 0, 0, 1150, 1151, 3, 66, 25, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 115, 11, 0, 1153, 247, 
1, 0, 0, 0, 1154, 1155, 3, 68, 26, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 116, 11, 0, 1157, 249, 1, 0, 0, 0, 1158, 1159, 3, 70, 27, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 117, 11, 0, 1161, 251, 1, 0, 0, 0, 1162, 1163, 3, 72, 28, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 118, 16, 0, 1165, 1166, 6, 118, 12, 0, 1166, 253, 1, 0, 0, 0, 1167, 1168, 3, 106, 45, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 119, 20, 0, 1170, 255, 1, 0, 0, 0, 1171, 1172, 3, 112, 48, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 120, 19, 0, 1174, 257, 1, 0, 0, 0, 1175, 1176, 3, 116, 50, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 121, 23, 0, 1178, 259, 1, 0, 0, 0, 1179, 1180, 4, 122, 10, 0, 1180, 1181, 3, 140, 62, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 122, 24, 0, 1183, 261, 1, 0, 0, 0, 1184, 1185, 4, 123, 11, 0, 1185, 1186, 3, 174, 79, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 25, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 1191, 7, 2, 0, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 3, 244, 114, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 125, 26, 0, 1195, 267, 1, 0, 0, 0, 1196, 1197, 3, 66, 25, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 126, 11, 0, 1199, 269, 1, 0, 0, 0, 1200, 1201, 3, 68, 26, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 127, 11, 0, 1203, 271, 1, 0, 0, 0, 1204, 1205, 3, 70, 27, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 128, 11, 0, 1207, 273, 1, 0, 0, 0, 1208, 1209, 3, 72, 28, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 129, 16, 0, 1211, 1212, 6, 129, 12, 0, 1212, 275, 1, 0, 0, 0, 1213, 1214, 3, 176, 80, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 130, 14, 0, 1216, 1217, 6, 130, 27, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 7, 7, 0, 0, 1219, 1220, 7, 9, 0, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 131, 28, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 7, 19, 0, 0, 1224, 1225, 7, 1, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 10, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 8, 35, 0, 0, 1231, 283, 1, 0, 0, 0, 1232, 1234, 3, 282, 133, 0, 1233, 1232, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1233, 1, 0, 0, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 3, 110, 47, 0, 1238, 1240, 1, 0, 0, 0, 1239, 1233, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1242, 1, 0, 0, 0, 1241, 1243, 3, 282, 133, 0, 1242, 1241, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 3, 284, 134, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 135, 29, 0, 1249, 287, 1, 0, 0, 0, 1250, 1251, 3, 66, 25, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 136, 11, 0, 1253, 289, 1, 0, 0, 0, 1254, 1255, 3, 68, 26, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 137, 11, 0, 1257, 291, 1, 0, 0, 0, 1258, 1259, 3, 70, 27, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 138, 11, 0, 1261, 293, 1, 0, 0, 0, 1262, 1263, 3, 72, 28, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 139, 16, 0, 1265, 1266, 6, 139, 12, 0, 1266, 1267, 6, 139, 12, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 106, 45, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 20, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 112, 48, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 19, 0, 1275, 299, 1, 0, 0, 0, 1276, 1277, 3, 116, 50, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 142, 23, 0, 1279, 301, 1, 0, 0, 0, 1280, 1281, 3, 280, 132, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 143, 30, 0, 1283, 303, 1, 0, 0, 0, 1284, 1285, 3, 244, 114, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 144, 26, 0, 1287, 305, 1, 0, 0, 0, 1288, 1289, 3, 184, 84, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 145, 31, 0, 1291, 307, 1, 0, 0, 0, 
1292, 1293, 4, 146, 12, 0, 1293, 1294, 3, 140, 62, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 146, 24, 0, 1296, 309, 1, 0, 0, 0, 1297, 1298, 4, 147, 13, 0, 1298, 1299, 3, 174, 79, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 25, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 3, 66, 25, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 148, 11, 0, 1305, 313, 1, 0, 0, 0, 1306, 1307, 3, 68, 26, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 149, 11, 0, 1309, 315, 1, 0, 0, 0, 1310, 1311, 3, 70, 27, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 150, 11, 0, 1313, 317, 1, 0, 0, 0, 1314, 1315, 3, 72, 28, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 151, 16, 0, 1317, 1318, 6, 151, 12, 0, 1318, 319, 1, 0, 0, 0, 1319, 1320, 3, 116, 50, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 152, 23, 0, 1322, 321, 1, 0, 0, 0, 1323, 1324, 4, 153, 14, 0, 1324, 1325, 3, 140, 62, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 24, 0, 1327, 323, 1, 0, 0, 0, 1328, 1329, 4, 154, 15, 0, 1329, 1330, 3, 174, 79, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 25, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 3, 184, 84, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 155, 31, 0, 1336, 327, 1, 0, 0, 0, 1337, 1338, 3, 180, 82, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 156, 32, 0, 1340, 329, 1, 0, 0, 0, 1341, 1342, 3, 66, 25, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 157, 11, 0, 1344, 331, 1, 0, 0, 0, 1345, 1346, 3, 68, 26, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 158, 11, 0, 1348, 333, 1, 0, 0, 0, 1349, 1350, 3, 70, 27, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 159, 11, 0, 1352, 335, 1, 0, 0, 0, 1353, 1354, 3, 72, 28, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 160, 16, 0, 1356, 1357, 6, 160, 12, 0, 1357, 337, 1, 0, 0, 0, 1358, 1359, 7, 1, 0, 0, 1359, 1360, 7, 9, 0, 0, 1360, 1361, 7, 15, 0, 0, 1361, 1362, 7, 7, 0, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 11, 0, 1366, 341, 1, 0, 0, 0, 1367, 1368, 3, 68, 26, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 163, 11, 0, 1370, 343, 1, 0, 0, 0, 1371, 1372, 3, 70, 27, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 164, 11, 0, 1374, 345, 1, 0, 0, 0, 1375, 1376, 3, 178, 81, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 165, 17, 0, 1378, 1379, 6, 165, 12, 0, 1379, 347, 1, 0, 0, 0, 1380, 1381, 3, 110, 47, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 166, 18, 0, 1383, 349, 1, 0, 0, 0, 1384, 1390, 3, 84, 34, 0, 1385, 1390, 3, 74, 29, 0, 1386, 1390, 3, 116, 50, 0, 1387, 1390, 3, 76, 30, 0, 1388, 1390, 3, 90, 37, 0, 1389, 1384, 1, 0, 0, 0, 1389, 1385, 1, 0, 0, 0, 1389, 1386, 1, 0, 0, 0, 1389, 1387, 1, 0, 0, 0, 1389, 1388, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1389, 1, 0, 0, 0, 1391, 1392, 1, 0, 0, 0, 1392, 351, 1, 0, 0, 0, 1393, 1394, 3, 66, 25, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 168, 11, 0, 1396, 353, 1, 0, 0, 0, 1397, 1398, 3, 68, 26, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 169, 11, 0, 1400, 355, 1, 0, 0, 0, 1401, 1402, 3, 70, 27, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 170, 11, 0, 1404, 357, 1, 0, 0, 0, 1405, 1406, 3, 72, 28, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 171, 16, 0, 1408, 1409, 6, 171, 12, 0, 1409, 359, 1, 0, 0, 0, 1410, 1411, 3, 110, 47, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 172, 18, 0, 1413, 361, 1, 0, 0, 0, 1414, 1415, 3, 112, 48, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 173, 19, 0, 1417, 363, 1, 0, 0, 0, 1418, 1419, 3, 116, 50, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 174, 23, 0, 1421, 365, 1, 0, 0, 0, 1422, 1423, 3, 278, 131, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 175, 33, 0, 1425, 1426, 6, 175, 34, 0, 1426, 367, 1, 0, 0, 0, 1427, 1428, 3, 218, 101, 0, 
1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 176, 21, 0, 1430, 369, 1, 0, 0, 0, 1431, 1432, 3, 94, 39, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 177, 22, 0, 1434, 371, 1, 0, 0, 0, 1435, 1436, 3, 66, 25, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 178, 11, 0, 1438, 373, 1, 0, 0, 0, 1439, 1440, 3, 68, 26, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 179, 11, 0, 1442, 375, 1, 0, 0, 0, 1443, 1444, 3, 70, 27, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 180, 11, 0, 1446, 377, 1, 0, 0, 0, 1447, 1448, 3, 72, 28, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 181, 16, 0, 1450, 1451, 6, 181, 12, 0, 1451, 1452, 6, 181, 12, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 112, 48, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 19, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 116, 50, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 23, 0, 1460, 383, 1, 0, 0, 0, 1461, 1462, 3, 244, 114, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 184, 26, 0, 1464, 385, 1, 0, 0, 0, 1465, 1466, 3, 66, 25, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 185, 11, 0, 1468, 387, 1, 0, 0, 0, 1469, 1470, 3, 68, 26, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 186, 11, 0, 1472, 389, 1, 0, 0, 0, 1473, 1474, 3, 70, 27, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 187, 11, 0, 1476, 391, 1, 0, 0, 0, 1477, 1478, 3, 72, 28, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 188, 16, 0, 1480, 1481, 6, 188, 12, 0, 1481, 393, 1, 0, 0, 0, 1482, 1483, 3, 54, 19, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 189, 35, 0, 1485, 395, 1, 0, 0, 0, 1486, 1487, 3, 264, 124, 0, 1487, 1488, 1, 0, 0, 0, 1488, 1489, 6, 190, 36, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 278, 131, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 33, 0, 1493, 1494, 6, 191, 12, 0, 1494, 1495, 6, 191, 0, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 7, 20, 0, 0, 1497, 1498, 7, 2, 0, 0, 1498, 1499, 7, 1, 0, 0, 1499, 1500, 7, 9, 0, 0, 1500, 1501, 7, 17, 0, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 12, 0, 1503, 1504, 6, 192, 0, 0, 1504, 401, 1, 0, 0, 0, 1505, 1506, 3, 180, 82, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 193, 32, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 184, 84, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 31, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 66, 25, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 11, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 68, 26, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 11, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 70, 27, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 11, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 72, 28, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 16, 0, 1528, 1529, 6, 198, 12, 0, 1529, 413, 1, 0, 0, 0, 1530, 1531, 3, 218, 101, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 199, 21, 0, 1533, 1534, 6, 199, 12, 0, 1534, 1535, 6, 199, 37, 0, 1535, 415, 1, 0, 0, 0, 1536, 1537, 3, 94, 39, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 200, 22, 0, 1539, 1540, 6, 200, 12, 0, 1540, 1541, 6, 200, 37, 0, 1541, 417, 1, 0, 0, 0, 1542, 1543, 3, 66, 25, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 201, 11, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 68, 26, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 11, 0, 1549, 421, 1, 0, 0, 0, 1550, 1551, 3, 70, 27, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 203, 11, 0, 1553, 423, 1, 0, 0, 0, 1554, 1555, 3, 110, 47, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 204, 18, 0, 1557, 1558, 6, 204, 12, 0, 1558, 1559, 6, 204, 9, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 112, 48, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 19, 0, 1563, 1564, 6, 205, 12, 0, 1564, 1565, 6, 205, 9, 0, 1565, 427, 1, 0, 0, 0, 1566, 1567, 3, 66, 25, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 206, 
11, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 68, 26, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 11, 0, 1573, 431, 1, 0, 0, 0, 1574, 1575, 3, 70, 27, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 208, 11, 0, 1577, 433, 1, 0, 0, 0, 1578, 1579, 3, 184, 84, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 209, 12, 0, 1581, 1582, 6, 209, 0, 0, 1582, 1583, 6, 209, 31, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 180, 82, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 12, 0, 1587, 1588, 6, 210, 0, 0, 1588, 1589, 6, 210, 32, 0, 1589, 437, 1, 0, 0, 0, 1590, 1591, 3, 100, 42, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 211, 12, 0, 1593, 1594, 6, 211, 0, 0, 1594, 1595, 6, 211, 38, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 72, 28, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 16, 0, 1599, 1600, 6, 212, 12, 0, 1600, 441, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 655, 665, 669, 672, 681, 683, 694, 713, 718, 727, 734, 739, 741, 752, 760, 763, 765, 770, 775, 781, 788, 793, 799, 802, 810, 814, 942, 947, 954, 956, 972, 977, 982, 984, 990, 1067, 1072, 1121, 1125, 1130, 1135, 1140, 1142, 1146, 1148, 1235, 1239, 1244, 1389, 1391, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 7, 71, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 81, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 915264f21910f..f04582e820e28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -29,9 +29,9 @@ public class EsqlBaseLexer extends LexerConfig { LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29, - PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34, - AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, @@ -73,22 +73,22 @@ private static String[] makeRuleNames() { "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", - "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", - "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", + "ESCAPE_SEQUENCE", 
"UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", + "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", - "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", @@ -125,8 +125,8 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, null, null, null, null, "':'", "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, @@ -144,28 +144,28 @@ private static String[] makeSymbolicNames() { "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", 
"MULTILINE_COMMENT", "WS", "COLON", "PIPE", - "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", - "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", - "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", - "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", - "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", - "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", - "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -246,23 +246,21 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return 
DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); case 23: return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 78: - return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex); - case 111: + case 110: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 112: + case 111: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 123: + case 122: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 124: + case 123: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 147: + case 146: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 148: + case 147: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 154: + case 153: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 155: + case 154: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -323,72 +321,65 @@ private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 8: - return this.isDevVersion(); - } - return true; - } private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 9: + case 8: return this.isDevVersion(); } return true; } private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return this.isDevVersion(); } return true; } private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 11: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 13: + case 12: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 14: + case 13: return this.isDevVersion(); } return true; } private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 15: + case 14: return this.isDevVersion(); } return true; } private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 16: + case 15: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0000\u0080\u0648\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0080\u0641\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -450,991 +441,987 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - 
"\u00d4\u0002\u00d5\u0007\u00d5\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u00d4\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ 
"\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0018\u0004\u0018\u0290\b\u0018\u000b\u0018\f\u0018\u0291"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0005\u0019\u029a\b\u0019\n\u0019\f\u0019\u029d\t\u0019\u0001\u0019\u0003"+ - "\u0019\u02a0\b\u0019\u0001\u0019\u0003\u0019\u02a3\b\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0005\u001a\u02ac\b\u001a\n\u001a\f\u001a\u02af\t\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b7"+ - "\b\u001b\u000b\u001b\f\u001b\u02b8\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e"+ - "\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001!\u0001"+ - "!\u0001\"\u0001\"\u0003\"\u02ce\b\"\u0001\"\u0004\"\u02d1\b\"\u000b\""+ - "\f\"\u02d2\u0001#\u0001#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u02dc"+ - "\b%\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0003\'\u02e3\b\'\u0001(\u0001"+ - "(\u0001(\u0005(\u02e8\b(\n(\f(\u02eb\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ - "(\u0001(\u0005(\u02f3\b(\n(\f(\u02f6\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ - "(\u0003(\u02fd\b(\u0001(\u0003(\u0300\b(\u0003(\u0302\b(\u0001)\u0004"+ - ")\u0305\b)\u000b)\f)\u0306\u0001*\u0004*\u030a\b*\u000b*\f*\u030b\u0001"+ - "*\u0001*\u0005*\u0310\b*\n*\f*\u0313\t*\u0001*\u0001*\u0004*\u0317\b*"+ - "\u000b*\f*\u0318\u0001*\u0004*\u031c\b*\u000b*\f*\u031d\u0001*\u0001*"+ - "\u0005*\u0322\b*\n*\f*\u0325\t*\u0003*\u0327\b*\u0001*\u0001*\u0001*\u0001"+ - "*\u0004*\u032d\b*\u000b*\f*\u032e\u0001*\u0001*\u0003*\u0333\b*\u0001"+ - "+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001"+ - "-\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ - "1\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u0001"+ - "3\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ - "8\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001;\u0001"+ - ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001"+ - "B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001"+ - "F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001N\u0001"+ - "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0003P\u03b6"+ - 
"\bP\u0001P\u0005P\u03b9\bP\nP\fP\u03bc\tP\u0001P\u0001P\u0004P\u03c0\b"+ - "P\u000bP\fP\u03c1\u0003P\u03c4\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ - "R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0005S\u03d2\bS\nS\fS\u03d5"+ - "\tS\u0001S\u0001S\u0003S\u03d9\bS\u0001S\u0004S\u03dc\bS\u000bS\fS\u03dd"+ - "\u0003S\u03e0\bS\u0001T\u0001T\u0004T\u03e4\bT\u000bT\fT\u03e5\u0001T"+ - "\u0001T\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ - "\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ - "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001"+ - "`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001"+ - "d\u0001d\u0001e\u0001e\u0001e\u0003e\u0433\be\u0001f\u0004f\u0436\bf\u000b"+ - "f\ff\u0437\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ + "\u0018\u028e\b\u0018\u000b\u0018\f\u0018\u028f\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0298\b\u0019"+ + "\n\u0019\f\u0019\u029b\t\u0019\u0001\u0019\u0003\u0019\u029e\b\u0019\u0001"+ + "\u0019\u0003\u0019\u02a1\b\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02aa\b\u001a\n"+ + "\u001a\f\u001a\u02ad\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b5\b\u001b\u000b\u001b\f"+ + "\u001b\u02b6\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02ca\b!\u0001"+ + "!\u0004!\u02cd\b!\u000b!\f!\u02ce\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "$\u0001$\u0001$\u0003$\u02d8\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ + "&\u02df\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02e4\b\'\n\'\f\'\u02e7\t\'"+ + "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u02ef\b\'\n\'"+ + "\f\'\u02f2\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u02f9\b"+ + "\'\u0001\'\u0003\'\u02fc\b\'\u0003\'\u02fe\b\'\u0001(\u0004(\u0301\b("+ + "\u000b(\f(\u0302\u0001)\u0004)\u0306\b)\u000b)\f)\u0307\u0001)\u0001)"+ + "\u0005)\u030c\b)\n)\f)\u030f\t)\u0001)\u0001)\u0004)\u0313\b)\u000b)\f"+ + ")\u0314\u0001)\u0004)\u0318\b)\u000b)\f)\u0319\u0001)\u0001)\u0005)\u031e"+ + "\b)\n)\f)\u0321\t)\u0003)\u0323\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ + "\u0329\b)\u000b)\f)\u032a\u0001)\u0001)\u0003)\u032f\b)\u0001*\u0001*"+ + "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ + 
"B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0003O\u03af\bO\u0001O\u0005O\u03b2\bO\n"+ + "O\fO\u03b5\tO\u0001O\u0001O\u0004O\u03b9\bO\u000bO\fO\u03ba\u0003O\u03bd"+ + "\bO\u0001P\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0005R\u03cb\bR\nR\fR\u03ce\tR\u0001R\u0001R\u0003R\u03d2"+ + "\bR\u0001R\u0004R\u03d5\bR\u000bR\fR\u03d6\u0003R\u03d9\bR\u0001S\u0001"+ + "S\u0004S\u03dd\bS\u000bS\fS\u03de\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0003"+ + "d\u042c\bd\u0001e\u0004e\u042f\be\u000be\fe\u0430\u0001f\u0001f\u0001"+ + "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ - "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001"+ - "p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0003q\u0469"+ - "\bq\u0001r\u0001r\u0003r\u046d\br\u0001r\u0005r\u0470\br\nr\fr\u0473\t"+ - "r\u0001r\u0001r\u0003r\u0477\br\u0001r\u0004r\u047a\br\u000br\fr\u047b"+ - "\u0003r\u047e\br\u0001s\u0001s\u0004s\u0482\bs\u000bs\fs\u0483\u0001t"+ - "\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001"+ - "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ - "x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ - "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083"+ - "\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0087\u0004\u0087\u04d9\b\u0087"+ - "\u000b\u0087\f\u0087\u04da\u0001\u0087\u0001\u0087\u0003\u0087\u04df\b"+ - "\u0087\u0001\u0087\u0004\u0087\u04e2\b\u0087\u000b\u0087\f\u0087\u04e3"+ - "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a"+ - "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ - 
"\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c"+ - "\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e"+ - "\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f"+ - "\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1"+ - "\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2"+ - "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ - "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0004\u00a8"+ - "\u0575\b\u00a8\u000b\u00a8\f\u00a8\u0576\u0001\u00a9\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ - "\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ - "\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001"+ - "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001"+ - "\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001"+ - "\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ - "\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001"+ - 
"\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001"+ - "\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001"+ - "\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ - "\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001"+ - "\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001"+ - "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ - "\u00d5\u0001\u00d5\u0002\u02ad\u02f4\u0000\u00d6\u0010\u0001\u0012\u0002"+ - "\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b "+ - "\t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148"+ - "\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001cH\u001dJ\u001e"+ - "L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000"+ - "`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u0088"+ - "3\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c"+ - "=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac\u0000\u00ae\u0000"+ - "\u00b0E\u00b2F\u00b4G\u00b6H\u00b8\u0000\u00baI\u00bcJ\u00beK\u00c0L\u00c2"+ - "\u0000\u00c4\u0000\u00c6M\u00c8N\u00caO\u00cc\u0000\u00ce\u0000\u00d0"+ - "\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8P\u00da\u0000\u00dcQ\u00de"+ - "\u0000\u00e0\u0000\u00e2R\u00e4S\u00e6T\u00e8\u0000\u00ea\u0000\u00ec"+ - "\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6U\u00f8V\u00fa"+ - "W\u00fcX\u00fe\u0000\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108"+ - "\u0000\u010aY\u010c\u0000\u010eZ\u0110[\u0112\\\u0114\u0000\u0116\u0000"+ - "\u0118]\u011a^\u011c\u0000\u011e_\u0120\u0000\u0122`\u0124a\u0126b\u0128"+ - "\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134"+ - "\u0000\u0136\u0000\u0138\u0000\u013ac\u013cd\u013ee\u0140\u0000\u0142"+ - "\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a\u0000\u014cf\u014eg\u0150"+ - "h\u0152\u0000\u0154i\u0156j\u0158k\u015al\u015c\u0000\u015e\u0000\u0160"+ - "m\u0162n\u0164o\u0166p\u0168\u0000\u016a\u0000\u016c\u0000\u016e\u0000"+ - "\u0170\u0000\u0172\u0000\u0174\u0000\u0176q\u0178r\u017as\u017c\u0000"+ - "\u017e\u0000\u0180\u0000\u0182\u0000\u0184t\u0186u\u0188v\u018a\u0000"+ - "\u018c\u0000\u018e\u0000\u0190\u0000\u0192w\u0194\u0000\u0196\u0000\u0198"+ - "x\u019ay\u019cz\u019e\u0000\u01a0\u0000\u01a2\u0000\u01a4{\u01a6|\u01a8"+ - "}\u01aa\u0000\u01ac\u0000\u01ae~\u01b0\u007f\u01b2\u0080\u01b4\u0000\u01b6"+ - "\u0000\u01b8\u0000\u01ba\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000"+ - "IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002"+ - "\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000"+ - "HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002"+ - "\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000"+ - "WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002"+ - "\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000"+ - "\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001"+ - "\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,/"+ - "/::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0663"+ - "\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000"+ - "\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000"+ - 
"\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000"+ - "\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000"+ - "\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000"+ - "$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001"+ - "\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000"+ - "\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u0000"+ - "2\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001"+ - "\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000"+ - "\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000"+ - "@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000\u0000D\u0001"+ - "\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H\u0001\u0000\u0000"+ - "\u0000\u0001J\u0001\u0000\u0000\u0000\u0001`\u0001\u0000\u0000\u0000\u0001"+ - "b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000\u0001f\u0001"+ - "\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j\u0001\u0000\u0000"+ - "\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000\u0000\u0000\u0001"+ - "p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000\u0001t\u0001"+ - "\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x\u0001\u0000\u0000"+ - "\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000\u0000\u0000\u0001"+ - "~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000\u0000\u0001\u0082"+ - "\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000\u0000\u0001\u0086"+ - "\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000\u0000\u0001\u008a"+ - "\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000\u0000\u0001\u008e"+ - "\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000\u0000\u0001\u0092"+ - "\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000\u0000\u0001\u0096"+ - "\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000\u0000\u0001\u009a"+ - "\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000\u0000\u0001\u009e"+ - "\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2"+ - "\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6"+ - "\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa"+ - "\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae"+ - "\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2"+ - "\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6"+ - "\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000\u0000\u0001\u00bc"+ - "\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000\u0000\u0001\u00c0"+ - "\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4"+ - "\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8"+ - "\u0001\u0000\u0000\u0000\u0002\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc"+ - "\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0"+ - "\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4"+ - "\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8"+ - "\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000\u0000\u0003\u00de"+ - "\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2"+ - "\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6"+ - "\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea"+ - "\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee"+ - 
"\u0001\u0000\u0000\u0000\u0004\u00f0\u0001\u0000\u0000\u0000\u0004\u00f6"+ - "\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa"+ - "\u0001\u0000\u0000\u0000\u0004\u00fc\u0001\u0000\u0000\u0000\u0005\u00fe"+ - "\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000\u0000\u0005\u0102"+ - "\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000\u0000\u0005\u0106"+ - "\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000\u0000\u0005\u010a"+ - "\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000\u0000\u0005\u010e"+ - "\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000\u0000\u0005\u0112"+ - "\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116"+ - "\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a"+ - "\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000\u0000\u0006\u0120"+ - "\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000\u0000\u0006\u0124"+ - "\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000\u0000\u0007\u0128"+ - "\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000\u0000\u0007\u012c"+ - "\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000\u0000\u0007\u0130"+ - "\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000\u0000\u0007\u0134"+ - "\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000\u0000\u0007\u0138"+ - "\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c"+ - "\u0001\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000\u0000\b\u0140\u0001"+ - "\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000"+ - "\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000"+ - "\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000"+ - "\b\u014e\u0001\u0000\u0000\u0000\b\u0150\u0001\u0000\u0000\u0000\t\u0152"+ - "\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001"+ - "\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000"+ - "\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000"+ - "\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000"+ - "\n\u0164\u0001\u0000\u0000\u0000\n\u0166\u0001\u0000\u0000\u0000\u000b"+ - "\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000\u0000\u000b"+ - "\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000\u0000\u000b"+ - "\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000\u0000\u000b"+ - "\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000\u0000\u000b"+ - "\u0178\u0001\u0000\u0000\u0000\u000b\u017a\u0001\u0000\u0000\u0000\f\u017c"+ - "\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001"+ - "\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000"+ - "\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000"+ - "\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000"+ - "\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192"+ - "\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001"+ - "\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000"+ - "\u0000\u0000\r\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001\u0000\u0000"+ - "\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001\u0000\u0000"+ - "\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001\u0000\u0000"+ - "\u0000\u000e\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001\u0000\u0000"+ - "\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001\u0000\u0000"+ - 
"\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001\u0000\u0000"+ - "\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001\u0000\u0000"+ - "\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u000f\u01ba\u0001\u0000\u0000"+ - "\u0000\u0010\u01bc\u0001\u0000\u0000\u0000\u0012\u01c6\u0001\u0000\u0000"+ - "\u0000\u0014\u01cd\u0001\u0000\u0000\u0000\u0016\u01d6\u0001\u0000\u0000"+ - "\u0000\u0018\u01dd\u0001\u0000\u0000\u0000\u001a\u01e7\u0001\u0000\u0000"+ - "\u0000\u001c\u01ee\u0001\u0000\u0000\u0000\u001e\u01f5\u0001\u0000\u0000"+ - "\u0000 \u01fc\u0001\u0000\u0000\u0000\"\u0204\u0001\u0000\u0000\u0000"+ - "$\u0210\u0001\u0000\u0000\u0000&\u0219\u0001\u0000\u0000\u0000(\u021f"+ - "\u0001\u0000\u0000\u0000*\u0226\u0001\u0000\u0000\u0000,\u022d\u0001\u0000"+ - "\u0000\u0000.\u0235\u0001\u0000\u0000\u00000\u023d\u0001\u0000\u0000\u0000"+ - "2\u024c\u0001\u0000\u0000\u00004\u0258\u0001\u0000\u0000\u00006\u0263"+ - "\u0001\u0000\u0000\u00008\u026b\u0001\u0000\u0000\u0000:\u0273\u0001\u0000"+ - "\u0000\u0000<\u027b\u0001\u0000\u0000\u0000>\u0284\u0001\u0000\u0000\u0000"+ - "@\u028f\u0001\u0000\u0000\u0000B\u0295\u0001\u0000\u0000\u0000D\u02a6"+ - "\u0001\u0000\u0000\u0000F\u02b6\u0001\u0000\u0000\u0000H\u02bc\u0001\u0000"+ - "\u0000\u0000J\u02be\u0001\u0000\u0000\u0000L\u02c2\u0001\u0000\u0000\u0000"+ - "N\u02c4\u0001\u0000\u0000\u0000P\u02c6\u0001\u0000\u0000\u0000R\u02c9"+ - "\u0001\u0000\u0000\u0000T\u02cb\u0001\u0000\u0000\u0000V\u02d4\u0001\u0000"+ - "\u0000\u0000X\u02d6\u0001\u0000\u0000\u0000Z\u02db\u0001\u0000\u0000\u0000"+ - "\\\u02dd\u0001\u0000\u0000\u0000^\u02e2\u0001\u0000\u0000\u0000`\u0301"+ - "\u0001\u0000\u0000\u0000b\u0304\u0001\u0000\u0000\u0000d\u0332\u0001\u0000"+ - "\u0000\u0000f\u0334\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000"+ - "j\u033b\u0001\u0000\u0000\u0000l\u033f\u0001\u0000\u0000\u0000n\u0341"+ - "\u0001\u0000\u0000\u0000p\u0344\u0001\u0000\u0000\u0000r\u0346\u0001\u0000"+ - "\u0000\u0000t\u034b\u0001\u0000\u0000\u0000v\u034d\u0001\u0000\u0000\u0000"+ - "x\u0353\u0001\u0000\u0000\u0000z\u0359\u0001\u0000\u0000\u0000|\u035c"+ - "\u0001\u0000\u0000\u0000~\u035f\u0001\u0000\u0000\u0000\u0080\u0364\u0001"+ - "\u0000\u0000\u0000\u0082\u0369\u0001\u0000\u0000\u0000\u0084\u036b\u0001"+ - "\u0000\u0000\u0000\u0086\u036f\u0001\u0000\u0000\u0000\u0088\u0374\u0001"+ - "\u0000\u0000\u0000\u008a\u037a\u0001\u0000\u0000\u0000\u008c\u037d\u0001"+ - "\u0000\u0000\u0000\u008e\u037f\u0001\u0000\u0000\u0000\u0090\u0385\u0001"+ - "\u0000\u0000\u0000\u0092\u0387\u0001\u0000\u0000\u0000\u0094\u038c\u0001"+ - "\u0000\u0000\u0000\u0096\u038f\u0001\u0000\u0000\u0000\u0098\u0392\u0001"+ - "\u0000\u0000\u0000\u009a\u0395\u0001\u0000\u0000\u0000\u009c\u0397\u0001"+ - "\u0000\u0000\u0000\u009e\u039a\u0001\u0000\u0000\u0000\u00a0\u039c\u0001"+ - "\u0000\u0000\u0000\u00a2\u039f\u0001\u0000\u0000\u0000\u00a4\u03a1\u0001"+ - "\u0000\u0000\u0000\u00a6\u03a3\u0001\u0000\u0000\u0000\u00a8\u03a5\u0001"+ - "\u0000\u0000\u0000\u00aa\u03a7\u0001\u0000\u0000\u0000\u00ac\u03a9\u0001"+ - "\u0000\u0000\u0000\u00ae\u03ae\u0001\u0000\u0000\u0000\u00b0\u03c3\u0001"+ - "\u0000\u0000\u0000\u00b2\u03c5\u0001\u0000\u0000\u0000\u00b4\u03ca\u0001"+ - "\u0000\u0000\u0000\u00b6\u03df\u0001\u0000\u0000\u0000\u00b8\u03e1\u0001"+ - "\u0000\u0000\u0000\u00ba\u03e9\u0001\u0000\u0000\u0000\u00bc\u03eb\u0001"+ - "\u0000\u0000\u0000\u00be\u03ef\u0001\u0000\u0000\u0000\u00c0\u03f3\u0001"+ - "\u0000\u0000\u0000\u00c2\u03f7\u0001\u0000\u0000\u0000\u00c4\u03fc\u0001"+ - 
"\u0000\u0000\u0000\u00c6\u0401\u0001\u0000\u0000\u0000\u00c8\u0405\u0001"+ - "\u0000\u0000\u0000\u00ca\u0409\u0001\u0000\u0000\u0000\u00cc\u040d\u0001"+ - "\u0000\u0000\u0000\u00ce\u0412\u0001\u0000\u0000\u0000\u00d0\u0416\u0001"+ - "\u0000\u0000\u0000\u00d2\u041a\u0001\u0000\u0000\u0000\u00d4\u041e\u0001"+ - "\u0000\u0000\u0000\u00d6\u0422\u0001\u0000\u0000\u0000\u00d8\u0426\u0001"+ - "\u0000\u0000\u0000\u00da\u0432\u0001\u0000\u0000\u0000\u00dc\u0435\u0001"+ - "\u0000\u0000\u0000\u00de\u0439\u0001\u0000\u0000\u0000\u00e0\u043d\u0001"+ - "\u0000\u0000\u0000\u00e2\u0441\u0001\u0000\u0000\u0000\u00e4\u0445\u0001"+ - "\u0000\u0000\u0000\u00e6\u0449\u0001\u0000\u0000\u0000\u00e8\u044d\u0001"+ - "\u0000\u0000\u0000\u00ea\u0452\u0001\u0000\u0000\u0000\u00ec\u0456\u0001"+ - "\u0000\u0000\u0000\u00ee\u045a\u0001\u0000\u0000\u0000\u00f0\u045f\u0001"+ - "\u0000\u0000\u0000\u00f2\u0468\u0001\u0000\u0000\u0000\u00f4\u047d\u0001"+ - "\u0000\u0000\u0000\u00f6\u0481\u0001\u0000\u0000\u0000\u00f8\u0485\u0001"+ - "\u0000\u0000\u0000\u00fa\u0489\u0001\u0000\u0000\u0000\u00fc\u048d\u0001"+ - "\u0000\u0000\u0000\u00fe\u0491\u0001\u0000\u0000\u0000\u0100\u0496\u0001"+ - "\u0000\u0000\u0000\u0102\u049a\u0001\u0000\u0000\u0000\u0104\u049e\u0001"+ - "\u0000\u0000\u0000\u0106\u04a2\u0001\u0000\u0000\u0000\u0108\u04a7\u0001"+ - "\u0000\u0000\u0000\u010a\u04ac\u0001\u0000\u0000\u0000\u010c\u04af\u0001"+ - "\u0000\u0000\u0000\u010e\u04b3\u0001\u0000\u0000\u0000\u0110\u04b7\u0001"+ - "\u0000\u0000\u0000\u0112\u04bb\u0001\u0000\u0000\u0000\u0114\u04bf\u0001"+ - "\u0000\u0000\u0000\u0116\u04c4\u0001\u0000\u0000\u0000\u0118\u04c9\u0001"+ - "\u0000\u0000\u0000\u011a\u04ce\u0001\u0000\u0000\u0000\u011c\u04d5\u0001"+ - "\u0000\u0000\u0000\u011e\u04de\u0001\u0000\u0000\u0000\u0120\u04e5\u0001"+ - "\u0000\u0000\u0000\u0122\u04e9\u0001\u0000\u0000\u0000\u0124\u04ed\u0001"+ - "\u0000\u0000\u0000\u0126\u04f1\u0001\u0000\u0000\u0000\u0128\u04f5\u0001"+ - "\u0000\u0000\u0000\u012a\u04fb\u0001\u0000\u0000\u0000\u012c\u04ff\u0001"+ - "\u0000\u0000\u0000\u012e\u0503\u0001\u0000\u0000\u0000\u0130\u0507\u0001"+ - "\u0000\u0000\u0000\u0132\u050b\u0001\u0000\u0000\u0000\u0134\u050f\u0001"+ - "\u0000\u0000\u0000\u0136\u0513\u0001\u0000\u0000\u0000\u0138\u0518\u0001"+ - "\u0000\u0000\u0000\u013a\u051d\u0001\u0000\u0000\u0000\u013c\u0521\u0001"+ - "\u0000\u0000\u0000\u013e\u0525\u0001\u0000\u0000\u0000\u0140\u0529\u0001"+ - "\u0000\u0000\u0000\u0142\u052e\u0001\u0000\u0000\u0000\u0144\u0532\u0001"+ - "\u0000\u0000\u0000\u0146\u0537\u0001\u0000\u0000\u0000\u0148\u053c\u0001"+ - "\u0000\u0000\u0000\u014a\u0540\u0001\u0000\u0000\u0000\u014c\u0544\u0001"+ - "\u0000\u0000\u0000\u014e\u0548\u0001\u0000\u0000\u0000\u0150\u054c\u0001"+ - "\u0000\u0000\u0000\u0152\u0550\u0001\u0000\u0000\u0000\u0154\u0555\u0001"+ - "\u0000\u0000\u0000\u0156\u055a\u0001\u0000\u0000\u0000\u0158\u055e\u0001"+ - "\u0000\u0000\u0000\u015a\u0562\u0001\u0000\u0000\u0000\u015c\u0566\u0001"+ - "\u0000\u0000\u0000\u015e\u056b\u0001\u0000\u0000\u0000\u0160\u0574\u0001"+ - "\u0000\u0000\u0000\u0162\u0578\u0001\u0000\u0000\u0000\u0164\u057c\u0001"+ - "\u0000\u0000\u0000\u0166\u0580\u0001\u0000\u0000\u0000\u0168\u0584\u0001"+ - "\u0000\u0000\u0000\u016a\u0589\u0001\u0000\u0000\u0000\u016c\u058d\u0001"+ - "\u0000\u0000\u0000\u016e\u0591\u0001\u0000\u0000\u0000\u0170\u0595\u0001"+ - "\u0000\u0000\u0000\u0172\u059a\u0001\u0000\u0000\u0000\u0174\u059e\u0001"+ - "\u0000\u0000\u0000\u0176\u05a2\u0001\u0000\u0000\u0000\u0178\u05a6\u0001"+ - 
"\u0000\u0000\u0000\u017a\u05aa\u0001\u0000\u0000\u0000\u017c\u05ae\u0001"+ - "\u0000\u0000\u0000\u017e\u05b4\u0001\u0000\u0000\u0000\u0180\u05b8\u0001"+ - "\u0000\u0000\u0000\u0182\u05bc\u0001\u0000\u0000\u0000\u0184\u05c0\u0001"+ - "\u0000\u0000\u0000\u0186\u05c4\u0001\u0000\u0000\u0000\u0188\u05c8\u0001"+ - "\u0000\u0000\u0000\u018a\u05cc\u0001\u0000\u0000\u0000\u018c\u05d1\u0001"+ - "\u0000\u0000\u0000\u018e\u05d5\u0001\u0000\u0000\u0000\u0190\u05d9\u0001"+ - "\u0000\u0000\u0000\u0192\u05df\u0001\u0000\u0000\u0000\u0194\u05e8\u0001"+ - "\u0000\u0000\u0000\u0196\u05ec\u0001\u0000\u0000\u0000\u0198\u05f0\u0001"+ - "\u0000\u0000\u0000\u019a\u05f4\u0001\u0000\u0000\u0000\u019c\u05f8\u0001"+ - "\u0000\u0000\u0000\u019e\u05fc\u0001\u0000\u0000\u0000\u01a0\u0601\u0001"+ - "\u0000\u0000\u0000\u01a2\u0607\u0001\u0000\u0000\u0000\u01a4\u060d\u0001"+ - "\u0000\u0000\u0000\u01a6\u0611\u0001\u0000\u0000\u0000\u01a8\u0615\u0001"+ - "\u0000\u0000\u0000\u01aa\u0619\u0001\u0000\u0000\u0000\u01ac\u061f\u0001"+ - "\u0000\u0000\u0000\u01ae\u0625\u0001\u0000\u0000\u0000\u01b0\u0629\u0001"+ - "\u0000\u0000\u0000\u01b2\u062d\u0001\u0000\u0000\u0000\u01b4\u0631\u0001"+ - "\u0000\u0000\u0000\u01b6\u0637\u0001\u0000\u0000\u0000\u01b8\u063d\u0001"+ - "\u0000\u0000\u0000\u01ba\u0643\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007"+ - "\u0000\u0000\u0000\u01bd\u01be\u0007\u0001\u0000\u0000\u01be\u01bf\u0007"+ - "\u0002\u0000\u0000\u01bf\u01c0\u0007\u0002\u0000\u0000\u01c0\u01c1\u0007"+ - "\u0003\u0000\u0000\u01c1\u01c2\u0007\u0004\u0000\u0000\u01c2\u01c3\u0007"+ - "\u0005\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006"+ - "\u0000\u0000\u0000\u01c5\u0011\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007"+ - "\u0000\u0000\u0000\u01c7\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007"+ - "\u0007\u0000\u0000\u01c9\u01ca\u0007\b\u0000\u0000\u01ca\u01cb\u0001\u0000"+ - "\u0000\u0000\u01cb\u01cc\u0006\u0001\u0001\u0000\u01cc\u0013\u0001\u0000"+ - "\u0000\u0000\u01cd\u01ce\u0007\u0003\u0000\u0000\u01ce\u01cf\u0007\t\u0000"+ - "\u0000\u01cf\u01d0\u0007\u0006\u0000\u0000\u01d0\u01d1\u0007\u0001\u0000"+ - "\u0000\u01d1\u01d2\u0007\u0004\u0000\u0000\u01d2\u01d3\u0007\n\u0000\u0000"+ - "\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0002\u0002\u0000"+ - "\u01d5\u0015\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000"+ - "\u01d7\u01d8\u0007\u000b\u0000\u0000\u01d8\u01d9\u0007\f\u0000\u0000\u01d9"+ - "\u01da\u0007\r\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0006\u0003\u0000\u0000\u01dc\u0017\u0001\u0000\u0000\u0000\u01dd\u01de"+ - "\u0007\u0003\u0000\u0000\u01de\u01df\u0007\u000e\u0000\u0000\u01df\u01e0"+ - "\u0007\b\u0000\u0000\u01e0\u01e1\u0007\r\u0000\u0000\u01e1\u01e2\u0007"+ - "\f\u0000\u0000\u01e2\u01e3\u0007\u0001\u0000\u0000\u01e3\u01e4\u0007\t"+ - "\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e6\u0006\u0004"+ - "\u0003\u0000\u01e6\u0019\u0001\u0000\u0000\u0000\u01e7\u01e8\u0007\u000f"+ - "\u0000\u0000\u01e8\u01e9\u0007\u0006\u0000\u0000\u01e9\u01ea\u0007\u0007"+ - "\u0000\u0000\u01ea\u01eb\u0007\u0010\u0000\u0000\u01eb\u01ec\u0001\u0000"+ - "\u0000\u0000\u01ec\u01ed\u0006\u0005\u0004\u0000\u01ed\u001b\u0001\u0000"+ - "\u0000\u0000\u01ee\u01ef\u0007\u0011\u0000\u0000\u01ef\u01f0\u0007\u0006"+ - "\u0000\u0000\u01f0\u01f1\u0007\u0007\u0000\u0000\u01f1\u01f2\u0007\u0012"+ - "\u0000\u0000\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u01f4\u0006\u0006"+ - "\u0000\u0000\u01f4\u001d\u0001\u0000\u0000\u0000\u01f5\u01f6\u0007\u0012"+ - 
"\u0000\u0000\u01f6\u01f7\u0007\u0003\u0000\u0000\u01f7\u01f8\u0007\u0003"+ - "\u0000\u0000\u01f8\u01f9\u0007\b\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0006\u0007\u0001\u0000\u01fb\u001f\u0001\u0000\u0000"+ - "\u0000\u01fc\u01fd\u0007\r\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000\u0000"+ - "\u01fe\u01ff\u0007\u0010\u0000\u0000\u01ff\u0200\u0007\u0001\u0000\u0000"+ - "\u0200\u0201\u0007\u0005\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0006\b\u0000\u0000\u0203!\u0001\u0000\u0000\u0000\u0204"+ - "\u0205\u0007\u0010\u0000\u0000\u0205\u0206\u0007\u000b\u0000\u0000\u0206"+ - "\u0207\u0005_\u0000\u0000\u0207\u0208\u0007\u0003\u0000\u0000\u0208\u0209"+ - "\u0007\u000e\u0000\u0000\u0209\u020a\u0007\b\u0000\u0000\u020a\u020b\u0007"+ - "\f\u0000\u0000\u020b\u020c\u0007\t\u0000\u0000\u020c\u020d\u0007\u0000"+ - "\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000\u020e\u020f\u0006\t\u0005"+ - "\u0000\u020f#\u0001\u0000\u0000\u0000\u0210\u0211\u0007\u0006\u0000\u0000"+ - "\u0211\u0212\u0007\u0003\u0000\u0000\u0212\u0213\u0007\t\u0000\u0000\u0213"+ - "\u0214\u0007\f\u0000\u0000\u0214\u0215\u0007\u0010\u0000\u0000\u0215\u0216"+ - "\u0007\u0003\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000\u0217\u0218"+ - "\u0006\n\u0006\u0000\u0218%\u0001\u0000\u0000\u0000\u0219\u021a\u0007"+ - "\u0006\u0000\u0000\u021a\u021b\u0007\u0007\u0000\u0000\u021b\u021c\u0007"+ - "\u0013\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u021e\u0006"+ - "\u000b\u0000\u0000\u021e\'\u0001\u0000\u0000\u0000\u021f\u0220\u0007\u0002"+ - "\u0000\u0000\u0220\u0221\u0007\n\u0000\u0000\u0221\u0222\u0007\u0007\u0000"+ - "\u0000\u0222\u0223\u0007\u0013\u0000\u0000\u0223\u0224\u0001\u0000\u0000"+ - "\u0000\u0224\u0225\u0006\f\u0007\u0000\u0225)\u0001\u0000\u0000\u0000"+ - "\u0226\u0227\u0007\u0002\u0000\u0000\u0227\u0228\u0007\u0007\u0000\u0000"+ - "\u0228\u0229\u0007\u0006\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000"+ - "\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0006\r\u0000\u0000\u022c"+ - "+\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0002\u0000\u0000\u022e\u022f"+ - "\u0007\u0005\u0000\u0000\u022f\u0230\u0007\f\u0000\u0000\u0230\u0231\u0007"+ - "\u0005\u0000\u0000\u0231\u0232\u0007\u0002\u0000\u0000\u0232\u0233\u0001"+ - "\u0000\u0000\u0000\u0233\u0234\u0006\u000e\u0000\u0000\u0234-\u0001\u0000"+ - "\u0000\u0000\u0235\u0236\u0007\u0013\u0000\u0000\u0236\u0237\u0007\n\u0000"+ - "\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0007\u0006\u0000"+ - "\u0000\u0239\u023a\u0007\u0003\u0000\u0000\u023a\u023b\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0006\u000f\u0000\u0000\u023c/\u0001\u0000\u0000\u0000"+ - "\u023d\u023e\u0004\u0010\u0000\u0000\u023e\u023f\u0007\u0001\u0000\u0000"+ - "\u023f\u0240\u0007\t\u0000\u0000\u0240\u0241\u0007\r\u0000\u0000\u0241"+ - "\u0242\u0007\u0001\u0000\u0000\u0242\u0243\u0007\t\u0000\u0000\u0243\u0244"+ - "\u0007\u0003\u0000\u0000\u0244\u0245\u0007\u0002\u0000\u0000\u0245\u0246"+ - "\u0007\u0005\u0000\u0000\u0246\u0247\u0007\f\u0000\u0000\u0247\u0248\u0007"+ - "\u0005\u0000\u0000\u0248\u0249\u0007\u0002\u0000\u0000\u0249\u024a\u0001"+ - "\u0000\u0000\u0000\u024a\u024b\u0006\u0010\u0000\u0000\u024b1\u0001\u0000"+ - "\u0000\u0000\u024c\u024d\u0004\u0011\u0001\u0000\u024d\u024e\u0007\r\u0000"+ - "\u0000\u024e\u024f\u0007\u0007\u0000\u0000\u024f\u0250\u0007\u0007\u0000"+ - "\u0000\u0250\u0251\u0007\u0012\u0000\u0000\u0251\u0252\u0007\u0014\u0000"+ - "\u0000\u0252\u0253\u0007\b\u0000\u0000\u0253\u0254\u0005_\u0000\u0000"+ - 
"\u0254\u0255\u0005\u8001\uf414\u0000\u0000\u0255\u0256\u0001\u0000\u0000"+ - "\u0000\u0256\u0257\u0006\u0011\b\u0000\u02573\u0001\u0000\u0000\u0000"+ - "\u0258\u0259\u0004\u0012\u0002\u0000\u0259\u025a\u0007\u0010\u0000\u0000"+ - "\u025a\u025b\u0007\u0003\u0000\u0000\u025b\u025c\u0007\u0005\u0000\u0000"+ - "\u025c\u025d\u0007\u0006\u0000\u0000\u025d\u025e\u0007\u0001\u0000\u0000"+ - "\u025e\u025f\u0007\u0004\u0000\u0000\u025f\u0260\u0007\u0002\u0000\u0000"+ - "\u0260\u0261\u0001\u0000\u0000\u0000\u0261\u0262\u0006\u0012\t\u0000\u0262"+ - "5\u0001\u0000\u0000\u0000\u0263\u0264\u0004\u0013\u0003\u0000\u0264\u0265"+ - "\u0007\u0015\u0000\u0000\u0265\u0266\u0007\u0007\u0000\u0000\u0266\u0267"+ - "\u0007\u0001\u0000\u0000\u0267\u0268\u0007\t\u0000\u0000\u0268\u0269\u0001"+ - "\u0000\u0000\u0000\u0269\u026a\u0006\u0013\n\u0000\u026a7\u0001\u0000"+ - "\u0000\u0000\u026b\u026c\u0004\u0014\u0004\u0000\u026c\u026d\u0007\u000f"+ - "\u0000\u0000\u026d\u026e\u0007\u0014\u0000\u0000\u026e\u026f\u0007\r\u0000"+ - "\u0000\u026f\u0270\u0007\r\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000"+ - "\u0271\u0272\u0006\u0014\n\u0000\u02729\u0001\u0000\u0000\u0000\u0273"+ - "\u0274\u0004\u0015\u0005\u0000\u0274\u0275\u0007\r\u0000\u0000\u0275\u0276"+ - "\u0007\u0003\u0000\u0000\u0276\u0277\u0007\u000f\u0000\u0000\u0277\u0278"+ - "\u0007\u0005\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a"+ - "\u0006\u0015\n\u0000\u027a;\u0001\u0000\u0000\u0000\u027b\u027c\u0004"+ - "\u0016\u0006\u0000\u027c\u027d\u0007\u0006\u0000\u0000\u027d\u027e\u0007"+ - "\u0001\u0000\u0000\u027e\u027f\u0007\u0011\u0000\u0000\u027f\u0280\u0007"+ - "\n\u0000\u0000\u0280\u0281\u0007\u0005\u0000\u0000\u0281\u0282\u0001\u0000"+ - "\u0000\u0000\u0282\u0283\u0006\u0016\n\u0000\u0283=\u0001\u0000\u0000"+ - "\u0000\u0284\u0285\u0004\u0017\u0007\u0000\u0285\u0286\u0007\r\u0000\u0000"+ - "\u0286\u0287\u0007\u0007\u0000\u0000\u0287\u0288\u0007\u0007\u0000\u0000"+ - "\u0288\u0289\u0007\u0012\u0000\u0000\u0289\u028a\u0007\u0014\u0000\u0000"+ - "\u028a\u028b\u0007\b\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c"+ - "\u028d\u0006\u0017\n\u0000\u028d?\u0001\u0000\u0000\u0000\u028e\u0290"+ - "\b\u0016\u0000\u0000\u028f\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001"+ - "\u0000\u0000\u0000\u0291\u028f\u0001\u0000\u0000\u0000\u0291\u0292\u0001"+ - "\u0000\u0000\u0000\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0294\u0006"+ - "\u0018\u0000\u0000\u0294A\u0001\u0000\u0000\u0000\u0295\u0296\u0005/\u0000"+ - "\u0000\u0296\u0297\u0005/\u0000\u0000\u0297\u029b\u0001\u0000\u0000\u0000"+ - "\u0298\u029a\b\u0017\u0000\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u029a"+ - "\u029d\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b"+ - "\u029c\u0001\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d"+ - "\u029b\u0001\u0000\u0000\u0000\u029e\u02a0\u0005\r\u0000\u0000\u029f\u029e"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0\u02a2"+ - "\u0001\u0000\u0000\u0000\u02a1\u02a3\u0005\n\u0000\u0000\u02a2\u02a1\u0001"+ - "\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000\u0000\u02a3\u02a4\u0001"+ - "\u0000\u0000\u0000\u02a4\u02a5\u0006\u0019\u000b\u0000\u02a5C\u0001\u0000"+ - "\u0000\u0000\u02a6\u02a7\u0005/\u0000\u0000\u02a7\u02a8\u0005*\u0000\u0000"+ - "\u02a8\u02ad\u0001\u0000\u0000\u0000\u02a9\u02ac\u0003D\u001a\u0000\u02aa"+ - "\u02ac\t\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000\u0000\u02ab\u02aa"+ - "\u0001\u0000\u0000\u0000\u02ac\u02af\u0001\u0000\u0000\u0000\u02ad\u02ae"+ - 
"\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0"+ - "\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02b0\u02b1"+ - "\u0005*\u0000\u0000\u02b1\u02b2\u0005/\u0000\u0000\u02b2\u02b3\u0001\u0000"+ - "\u0000\u0000\u02b3\u02b4\u0006\u001a\u000b\u0000\u02b4E\u0001\u0000\u0000"+ - "\u0000\u02b5\u02b7\u0007\u0018\u0000\u0000\u02b6\u02b5\u0001\u0000\u0000"+ - "\u0000\u02b7\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000"+ - "\u0000\u02ba\u02bb\u0006\u001b\u000b\u0000\u02bbG\u0001\u0000\u0000\u0000"+ - "\u02bc\u02bd\u0005:\u0000\u0000\u02bdI\u0001\u0000\u0000\u0000\u02be\u02bf"+ - "\u0005|\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0006"+ - "\u001d\f\u0000\u02c1K\u0001\u0000\u0000\u0000\u02c2\u02c3\u0007\u0019"+ - "\u0000\u0000\u02c3M\u0001\u0000\u0000\u0000\u02c4\u02c5\u0007\u001a\u0000"+ - "\u0000\u02c5O\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005\\\u0000\u0000"+ - "\u02c7\u02c8\u0007\u001b\u0000\u0000\u02c8Q\u0001\u0000\u0000\u0000\u02c9"+ - "\u02ca\b\u001c\u0000\u0000\u02caS\u0001\u0000\u0000\u0000\u02cb\u02cd"+ - "\u0007\u0003\u0000\u0000\u02cc\u02ce\u0007\u001d\u0000\u0000\u02cd\u02cc"+ - "\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce\u02d0"+ - "\u0001\u0000\u0000\u0000\u02cf\u02d1\u0003L\u001e\u0000\u02d0\u02cf\u0001"+ - "\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02d0\u0001"+ - "\u0000\u0000\u0000\u02d2\u02d3\u0001\u0000\u0000\u0000\u02d3U\u0001\u0000"+ - "\u0000\u0000\u02d4\u02d5\u0005@\u0000\u0000\u02d5W\u0001\u0000\u0000\u0000"+ - "\u02d6\u02d7\u0005`\u0000\u0000\u02d7Y\u0001\u0000\u0000\u0000\u02d8\u02dc"+ - "\b\u001e\u0000\u0000\u02d9\u02da\u0005`\u0000\u0000\u02da\u02dc\u0005"+ - "`\u0000\u0000\u02db\u02d8\u0001\u0000\u0000\u0000\u02db\u02d9\u0001\u0000"+ - "\u0000\u0000\u02dc[\u0001\u0000\u0000\u0000\u02dd\u02de\u0005_\u0000\u0000"+ - "\u02de]\u0001\u0000\u0000\u0000\u02df\u02e3\u0003N\u001f\u0000\u02e0\u02e3"+ - "\u0003L\u001e\u0000\u02e1\u02e3\u0003\\&\u0000\u02e2\u02df\u0001\u0000"+ - "\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e1\u0001\u0000"+ - "\u0000\u0000\u02e3_\u0001\u0000\u0000\u0000\u02e4\u02e9\u0005\"\u0000"+ - "\u0000\u02e5\u02e8\u0003P \u0000\u02e6\u02e8\u0003R!\u0000\u02e7\u02e5"+ - "\u0001\u0000\u0000\u0000\u02e7\u02e6\u0001\u0000\u0000\u0000\u02e8\u02eb"+ - "\u0001\u0000\u0000\u0000\u02e9\u02e7\u0001\u0000\u0000\u0000\u02e9\u02ea"+ - "\u0001\u0000\u0000\u0000\u02ea\u02ec\u0001\u0000\u0000\u0000\u02eb\u02e9"+ - "\u0001\u0000\u0000\u0000\u02ec\u0302\u0005\"\u0000\u0000\u02ed\u02ee\u0005"+ - "\"\u0000\u0000\u02ee\u02ef\u0005\"\u0000\u0000\u02ef\u02f0\u0005\"\u0000"+ - "\u0000\u02f0\u02f4\u0001\u0000\u0000\u0000\u02f1\u02f3\b\u0017\u0000\u0000"+ - "\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3\u02f6\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000"+ - "\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6\u02f4\u0001\u0000\u0000\u0000"+ - "\u02f7\u02f8\u0005\"\u0000\u0000\u02f8\u02f9\u0005\"\u0000\u0000\u02f9"+ - "\u02fa\u0005\"\u0000\u0000\u02fa\u02fc\u0001\u0000\u0000\u0000\u02fb\u02fd"+ - "\u0005\"\u0000\u0000\u02fc\u02fb\u0001\u0000\u0000\u0000\u02fc\u02fd\u0001"+ - "\u0000\u0000\u0000\u02fd\u02ff\u0001\u0000\u0000\u0000\u02fe\u0300\u0005"+ - "\"\u0000\u0000\u02ff\u02fe\u0001\u0000\u0000\u0000\u02ff\u0300\u0001\u0000"+ - "\u0000\u0000\u0300\u0302\u0001\u0000\u0000\u0000\u0301\u02e4\u0001\u0000"+ - 
"\u0000\u0000\u0301\u02ed\u0001\u0000\u0000\u0000\u0302a\u0001\u0000\u0000"+ - "\u0000\u0303\u0305\u0003L\u001e\u0000\u0304\u0303\u0001\u0000\u0000\u0000"+ - "\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000\u0000"+ - "\u0306\u0307\u0001\u0000\u0000\u0000\u0307c\u0001\u0000\u0000\u0000\u0308"+ - "\u030a\u0003L\u001e\u0000\u0309\u0308\u0001\u0000\u0000\u0000\u030a\u030b"+ - "\u0001\u0000\u0000\u0000\u030b\u0309\u0001\u0000\u0000\u0000\u030b\u030c"+ - "\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d\u0311"+ - "\u0003t2\u0000\u030e\u0310\u0003L\u001e\u0000\u030f\u030e\u0001\u0000"+ - "\u0000\u0000\u0310\u0313\u0001\u0000\u0000\u0000\u0311\u030f\u0001\u0000"+ - "\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0333\u0001\u0000"+ - "\u0000\u0000\u0313\u0311\u0001\u0000\u0000\u0000\u0314\u0316\u0003t2\u0000"+ - "\u0315\u0317\u0003L\u001e\u0000\u0316\u0315\u0001\u0000\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000\u0318"+ - "\u0319\u0001\u0000\u0000\u0000\u0319\u0333\u0001\u0000\u0000\u0000\u031a"+ - "\u031c\u0003L\u001e\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031d"+ - "\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d\u031e"+ - "\u0001\u0000\u0000\u0000\u031e\u0326\u0001\u0000\u0000\u0000\u031f\u0323"+ - "\u0003t2\u0000\u0320\u0322\u0003L\u001e\u0000\u0321\u0320\u0001\u0000"+ - "\u0000\u0000\u0322\u0325\u0001\u0000\u0000\u0000\u0323\u0321\u0001\u0000"+ - "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0327\u0001\u0000"+ - "\u0000\u0000\u0325\u0323\u0001\u0000\u0000\u0000\u0326\u031f\u0001\u0000"+ - "\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000"+ - "\u0000\u0000\u0328\u0329\u0003T\"\u0000\u0329\u0333\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0003t2\u0000\u032b\u032d\u0003L\u001e\u0000\u032c"+ - "\u032b\u0001\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e"+ - "\u032c\u0001\u0000\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f"+ - "\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0003T\"\u0000\u0331\u0333"+ - "\u0001\u0000\u0000\u0000\u0332\u0309\u0001\u0000\u0000\u0000\u0332\u0314"+ - "\u0001\u0000\u0000\u0000\u0332\u031b\u0001\u0000\u0000\u0000\u0332\u032a"+ - "\u0001\u0000\u0000\u0000\u0333e\u0001\u0000\u0000\u0000\u0334\u0335\u0007"+ - "\u001f\u0000\u0000\u0335\u0336\u0007 \u0000\u0000\u0336g\u0001\u0000\u0000"+ - "\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\t\u0000\u0000"+ - "\u0339\u033a\u0007\u0000\u0000\u0000\u033ai\u0001\u0000\u0000\u0000\u033b"+ - "\u033c\u0007\f\u0000\u0000\u033c\u033d\u0007\u0002\u0000\u0000\u033d\u033e"+ - "\u0007\u0004\u0000\u0000\u033ek\u0001\u0000\u0000\u0000\u033f\u0340\u0005"+ - "=\u0000\u0000\u0340m\u0001\u0000\u0000\u0000\u0341\u0342\u0005:\u0000"+ - "\u0000\u0342\u0343\u0005:\u0000\u0000\u0343o\u0001\u0000\u0000\u0000\u0344"+ - "\u0345\u0005,\u0000\u0000\u0345q\u0001\u0000\u0000\u0000\u0346\u0347\u0007"+ - "\u0000\u0000\u0000\u0347\u0348\u0007\u0003\u0000\u0000\u0348\u0349\u0007"+ - "\u0002\u0000\u0000\u0349\u034a\u0007\u0004\u0000\u0000\u034as\u0001\u0000"+ - "\u0000\u0000\u034b\u034c\u0005.\u0000\u0000\u034cu\u0001\u0000\u0000\u0000"+ - "\u034d\u034e\u0007\u000f\u0000\u0000\u034e\u034f\u0007\f\u0000\u0000\u034f"+ - "\u0350\u0007\r\u0000\u0000\u0350\u0351\u0007\u0002\u0000\u0000\u0351\u0352"+ - "\u0007\u0003\u0000\u0000\u0352w\u0001\u0000\u0000\u0000\u0353\u0354\u0007"+ - "\u000f\u0000\u0000\u0354\u0355\u0007\u0001\u0000\u0000\u0355\u0356\u0007"+ - 
"\u0006\u0000\u0000\u0356\u0357\u0007\u0002\u0000\u0000\u0357\u0358\u0007"+ - "\u0005\u0000\u0000\u0358y\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0001"+ - "\u0000\u0000\u035a\u035b\u0007\t\u0000\u0000\u035b{\u0001\u0000\u0000"+ - "\u0000\u035c\u035d\u0007\u0001\u0000\u0000\u035d\u035e\u0007\u0002\u0000"+ - "\u0000\u035e}\u0001\u0000\u0000\u0000\u035f\u0360\u0007\r\u0000\u0000"+ - "\u0360\u0361\u0007\f\u0000\u0000\u0361\u0362\u0007\u0002\u0000\u0000\u0362"+ - "\u0363\u0007\u0005\u0000\u0000\u0363\u007f\u0001\u0000\u0000\u0000\u0364"+ - "\u0365\u0007\r\u0000\u0000\u0365\u0366\u0007\u0001\u0000\u0000\u0366\u0367"+ - "\u0007\u0012\u0000\u0000\u0367\u0368\u0007\u0003\u0000\u0000\u0368\u0081"+ - "\u0001\u0000\u0000\u0000\u0369\u036a\u0005(\u0000\u0000\u036a\u0083\u0001"+ - "\u0000\u0000\u0000\u036b\u036c\u0007\t\u0000\u0000\u036c\u036d\u0007\u0007"+ - "\u0000\u0000\u036d\u036e\u0007\u0005\u0000\u0000\u036e\u0085\u0001\u0000"+ - "\u0000\u0000\u036f\u0370\u0007\t\u0000\u0000\u0370\u0371\u0007\u0014\u0000"+ - "\u0000\u0371\u0372\u0007\r\u0000\u0000\u0372\u0373\u0007\r\u0000\u0000"+ - "\u0373\u0087\u0001\u0000\u0000\u0000\u0374\u0375\u0007\t\u0000\u0000\u0375"+ - "\u0376\u0007\u0014\u0000\u0000\u0376\u0377\u0007\r\u0000\u0000\u0377\u0378"+ - "\u0007\r\u0000\u0000\u0378\u0379\u0007\u0002\u0000\u0000\u0379\u0089\u0001"+ - "\u0000\u0000\u0000\u037a\u037b\u0007\u0007\u0000\u0000\u037b\u037c\u0007"+ - "\u0006\u0000\u0000\u037c\u008b\u0001\u0000\u0000\u0000\u037d\u037e\u0005"+ - "?\u0000\u0000\u037e\u008d\u0001\u0000\u0000\u0000\u037f\u0380\u0007\u0006"+ - "\u0000\u0000\u0380\u0381\u0007\r\u0000\u0000\u0381\u0382\u0007\u0001\u0000"+ - "\u0000\u0382\u0383\u0007\u0012\u0000\u0000\u0383\u0384\u0007\u0003\u0000"+ - "\u0000\u0384\u008f\u0001\u0000\u0000\u0000\u0385\u0386\u0005)\u0000\u0000"+ - "\u0386\u0091\u0001\u0000\u0000\u0000\u0387\u0388\u0007\u0005\u0000\u0000"+ - "\u0388\u0389\u0007\u0006\u0000\u0000\u0389\u038a\u0007\u0014\u0000\u0000"+ - "\u038a\u038b\u0007\u0003\u0000\u0000\u038b\u0093\u0001\u0000\u0000\u0000"+ - "\u038c\u038d\u0005=\u0000\u0000\u038d\u038e\u0005=\u0000\u0000\u038e\u0095"+ - "\u0001\u0000\u0000\u0000\u038f\u0390\u0005=\u0000\u0000\u0390\u0391\u0005"+ - "~\u0000\u0000\u0391\u0097\u0001\u0000\u0000\u0000\u0392\u0393\u0005!\u0000"+ - "\u0000\u0393\u0394\u0005=\u0000\u0000\u0394\u0099\u0001\u0000\u0000\u0000"+ - "\u0395\u0396\u0005<\u0000\u0000\u0396\u009b\u0001\u0000\u0000\u0000\u0397"+ - "\u0398\u0005<\u0000\u0000\u0398\u0399\u0005=\u0000\u0000\u0399\u009d\u0001"+ - "\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u009f\u0001\u0000"+ - "\u0000\u0000\u039c\u039d\u0005>\u0000\u0000\u039d\u039e\u0005=\u0000\u0000"+ - "\u039e\u00a1\u0001\u0000\u0000\u0000\u039f\u03a0\u0005+\u0000\u0000\u03a0"+ - "\u00a3\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005-\u0000\u0000\u03a2\u00a5"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005*\u0000\u0000\u03a4\u00a7\u0001"+ - "\u0000\u0000\u0000\u03a5\u03a6\u0005/\u0000\u0000\u03a6\u00a9\u0001\u0000"+ - "\u0000\u0000\u03a7\u03a8\u0005%\u0000\u0000\u03a8\u00ab\u0001\u0000\u0000"+ - "\u0000\u03a9\u03aa\u0004N\b\u0000\u03aa\u03ab\u0003H\u001c\u0000\u03ab"+ - "\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ad\u0006N\r\u0000\u03ad\u00ad"+ - "\u0001\u0000\u0000\u0000\u03ae\u03af\u0003.\u000f\u0000\u03af\u03b0\u0001"+ - "\u0000\u0000\u0000\u03b0\u03b1\u0006O\u000e\u0000\u03b1\u00af\u0001\u0000"+ - "\u0000\u0000\u03b2\u03b5\u0003\u008c>\u0000\u03b3\u03b6\u0003N\u001f\u0000"+ - "\u03b4\u03b6\u0003\\&\u0000\u03b5\u03b3\u0001\u0000\u0000\u0000\u03b5"+ - 
"\u03b4\u0001\u0000\u0000\u0000\u03b6\u03ba\u0001\u0000\u0000\u0000\u03b7"+ - "\u03b9\u0003^\'\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03bc"+ - "\u0001\u0000\u0000\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb"+ - "\u0001\u0000\u0000\u0000\u03bb\u03c4\u0001\u0000\u0000\u0000\u03bc\u03ba"+ - "\u0001\u0000\u0000\u0000\u03bd\u03bf\u0003\u008c>\u0000\u03be\u03c0\u0003"+ - "L\u001e\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c1\u0001\u0000"+ - "\u0000\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000"+ - "\u0000\u0000\u03c2\u03c4\u0001\u0000\u0000\u0000\u03c3\u03b2\u0001\u0000"+ - "\u0000\u0000\u03c3\u03bd\u0001\u0000\u0000\u0000\u03c4\u00b1\u0001\u0000"+ - "\u0000\u0000\u03c5\u03c6\u0005[\u0000\u0000\u03c6\u03c7\u0001\u0000\u0000"+ - "\u0000\u03c7\u03c8\u0006Q\u0000\u0000\u03c8\u03c9\u0006Q\u0000\u0000\u03c9"+ - "\u00b3\u0001\u0000\u0000\u0000\u03ca\u03cb\u0005]\u0000\u0000\u03cb\u03cc"+ - "\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006R\f\u0000\u03cd\u03ce\u0006"+ - "R\f\u0000\u03ce\u00b5\u0001\u0000\u0000\u0000\u03cf\u03d3\u0003N\u001f"+ - "\u0000\u03d0\u03d2\u0003^\'\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d5\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000"+ - "\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03e0\u0001\u0000\u0000\u0000"+ - "\u03d5\u03d3\u0001\u0000\u0000\u0000\u03d6\u03d9\u0003\\&\u0000\u03d7"+ - "\u03d9\u0003V#\u0000\u03d8\u03d6\u0001\u0000\u0000\u0000\u03d8\u03d7\u0001"+ - "\u0000\u0000\u0000\u03d9\u03db\u0001\u0000\u0000\u0000\u03da\u03dc\u0003"+ - "^\'\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd\u0001\u0000"+ - "\u0000\u0000\u03dd\u03db\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000"+ - "\u0000\u0000\u03de\u03e0\u0001\u0000\u0000\u0000\u03df\u03cf\u0001\u0000"+ - "\u0000\u0000\u03df\u03d8\u0001\u0000\u0000\u0000\u03e0\u00b7\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e3\u0003X$\u0000\u03e2\u03e4\u0003Z%\u0000\u03e3"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5"+ - "\u03e3\u0001\u0000\u0000\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ - "\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0003X$\u0000\u03e8\u00b9\u0001"+ - "\u0000\u0000\u0000\u03e9\u03ea\u0003\u00b8T\u0000\u03ea\u00bb\u0001\u0000"+ - "\u0000\u0000\u03eb\u03ec\u0003B\u0019\u0000\u03ec\u03ed\u0001\u0000\u0000"+ - "\u0000\u03ed\u03ee\u0006V\u000b\u0000\u03ee\u00bd\u0001\u0000\u0000\u0000"+ - "\u03ef\u03f0\u0003D\u001a\u0000\u03f0\u03f1\u0001\u0000\u0000\u0000\u03f1"+ - "\u03f2\u0006W\u000b\u0000\u03f2\u00bf\u0001\u0000\u0000\u0000\u03f3\u03f4"+ - "\u0003F\u001b\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5\u03f6\u0006"+ - "X\u000b\u0000\u03f6\u00c1\u0001\u0000\u0000\u0000\u03f7\u03f8\u0003\u00b2"+ - "Q\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa\u0006Y\u000f\u0000"+ - "\u03fa\u03fb\u0006Y\u0010\u0000\u03fb\u00c3\u0001\u0000\u0000\u0000\u03fc"+ - "\u03fd\u0003J\u001d\u0000\u03fd\u03fe\u0001\u0000\u0000\u0000\u03fe\u03ff"+ - "\u0006Z\u0011\u0000\u03ff\u0400\u0006Z\f\u0000\u0400\u00c5\u0001\u0000"+ - "\u0000\u0000\u0401\u0402\u0003F\u001b\u0000\u0402\u0403\u0001\u0000\u0000"+ - "\u0000\u0403\u0404\u0006[\u000b\u0000\u0404\u00c7\u0001\u0000\u0000\u0000"+ - "\u0405\u0406\u0003B\u0019\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ - "\u0408\u0006\\\u000b\u0000\u0408\u00c9\u0001\u0000\u0000\u0000\u0409\u040a"+ - "\u0003D\u001a\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ - "]\u000b\u0000\u040c\u00cb\u0001\u0000\u0000\u0000\u040d\u040e\u0003J\u001d"+ - 
"\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006^\u0011\u0000"+ - "\u0410\u0411\u0006^\f\u0000\u0411\u00cd\u0001\u0000\u0000\u0000\u0412"+ - "\u0413\u0003\u00b2Q\u0000\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415"+ - "\u0006_\u000f\u0000\u0415\u00cf\u0001\u0000\u0000\u0000\u0416\u0417\u0003"+ - "\u00b4R\u0000\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006`\u0012"+ - "\u0000\u0419\u00d1\u0001\u0000\u0000\u0000\u041a\u041b\u0003H\u001c\u0000"+ - "\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d\u0006a\r\u0000\u041d"+ - "\u00d3\u0001\u0000\u0000\u0000\u041e\u041f\u0003p0\u0000\u041f\u0420\u0001"+ - "\u0000\u0000\u0000\u0420\u0421\u0006b\u0013\u0000\u0421\u00d5\u0001\u0000"+ - "\u0000\u0000\u0422\u0423\u0003l.\u0000\u0423\u0424\u0001\u0000\u0000\u0000"+ - "\u0424\u0425\u0006c\u0014\u0000\u0425\u00d7\u0001\u0000\u0000\u0000\u0426"+ - "\u0427\u0007\u0010\u0000\u0000\u0427\u0428\u0007\u0003\u0000\u0000\u0428"+ - "\u0429\u0007\u0005\u0000\u0000\u0429\u042a\u0007\f\u0000\u0000\u042a\u042b"+ - "\u0007\u0000\u0000\u0000\u042b\u042c\u0007\f\u0000\u0000\u042c\u042d\u0007"+ - "\u0005\u0000\u0000\u042d\u042e\u0007\f\u0000\u0000\u042e\u00d9\u0001\u0000"+ - "\u0000\u0000\u042f\u0433\b!\u0000\u0000\u0430\u0431\u0005/\u0000\u0000"+ - "\u0431\u0433\b\"\u0000\u0000\u0432\u042f\u0001\u0000\u0000\u0000\u0432"+ - "\u0430\u0001\u0000\u0000\u0000\u0433\u00db\u0001\u0000\u0000\u0000\u0434"+ - "\u0436\u0003\u00dae\u0000\u0435\u0434\u0001\u0000\u0000\u0000\u0436\u0437"+ - "\u0001\u0000\u0000\u0000\u0437\u0435\u0001\u0000\u0000\u0000\u0437\u0438"+ - "\u0001\u0000\u0000\u0000\u0438\u00dd\u0001\u0000\u0000\u0000\u0439\u043a"+ - "\u0003\u00dcf\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006"+ - "g\u0015\u0000\u043c\u00df\u0001\u0000\u0000\u0000\u043d\u043e\u0003`("+ - "\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006h\u0016\u0000"+ - "\u0440\u00e1\u0001\u0000\u0000\u0000\u0441\u0442\u0003B\u0019\u0000\u0442"+ - "\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006i\u000b\u0000\u0444\u00e3"+ - "\u0001\u0000\u0000\u0000\u0445\u0446\u0003D\u001a\u0000\u0446\u0447\u0001"+ - "\u0000\u0000\u0000\u0447\u0448\u0006j\u000b\u0000\u0448\u00e5\u0001\u0000"+ - "\u0000\u0000\u0449\u044a\u0003F\u001b\u0000\u044a\u044b\u0001\u0000\u0000"+ - "\u0000\u044b\u044c\u0006k\u000b\u0000\u044c\u00e7\u0001\u0000\u0000\u0000"+ - "\u044d\u044e\u0003J\u001d\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f"+ - "\u0450\u0006l\u0011\u0000\u0450\u0451\u0006l\f\u0000\u0451\u00e9\u0001"+ - "\u0000\u0000\u0000\u0452\u0453\u0003t2\u0000\u0453\u0454\u0001\u0000\u0000"+ - "\u0000\u0454\u0455\u0006m\u0017\u0000\u0455\u00eb\u0001\u0000\u0000\u0000"+ - "\u0456\u0457\u0003p0\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459"+ - "\u0006n\u0013\u0000\u0459\u00ed\u0001\u0000\u0000\u0000\u045a\u045b\u0004"+ - "o\t\u0000\u045b\u045c\u0003\u008c>\u0000\u045c\u045d\u0001\u0000\u0000"+ - "\u0000\u045d\u045e\u0006o\u0018\u0000\u045e\u00ef\u0001\u0000\u0000\u0000"+ - "\u045f\u0460\u0004p\n\u0000\u0460\u0461\u0003\u00b0P\u0000\u0461\u0462"+ - "\u0001\u0000\u0000\u0000\u0462\u0463\u0006p\u0019\u0000\u0463\u00f1\u0001"+ - "\u0000\u0000\u0000\u0464\u0469\u0003N\u001f\u0000\u0465\u0469\u0003L\u001e"+ - "\u0000\u0466\u0469\u0003\\&\u0000\u0467\u0469\u0003\u00a6K\u0000\u0468"+ - "\u0464\u0001\u0000\u0000\u0000\u0468\u0465\u0001\u0000\u0000\u0000\u0468"+ - "\u0466\u0001\u0000\u0000\u0000\u0468\u0467\u0001\u0000\u0000\u0000\u0469"+ - "\u00f3\u0001\u0000\u0000\u0000\u046a\u046d\u0003N\u001f\u0000\u046b\u046d"+ - 
"\u0003\u00a6K\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046c\u046b\u0001"+ - "\u0000\u0000\u0000\u046d\u0471\u0001\u0000\u0000\u0000\u046e\u0470\u0003"+ - "\u00f2q\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0473\u0001\u0000"+ - "\u0000\u0000\u0471\u046f\u0001\u0000\u0000\u0000\u0471\u0472\u0001\u0000"+ - "\u0000\u0000\u0472\u047e\u0001\u0000\u0000\u0000\u0473\u0471\u0001\u0000"+ - "\u0000\u0000\u0474\u0477\u0003\\&\u0000\u0475\u0477\u0003V#\u0000\u0476"+ - "\u0474\u0001\u0000\u0000\u0000\u0476\u0475\u0001\u0000\u0000\u0000\u0477"+ - "\u0479\u0001\u0000\u0000\u0000\u0478\u047a\u0003\u00f2q\u0000\u0479\u0478"+ - "\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000\u047b\u0479"+ - "\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047e"+ - "\u0001\u0000\u0000\u0000\u047d\u046c\u0001\u0000\u0000\u0000\u047d\u0476"+ - "\u0001\u0000\u0000\u0000\u047e\u00f5\u0001\u0000\u0000\u0000\u047f\u0482"+ - "\u0003\u00f4r\u0000\u0480\u0482\u0003\u00b8T\u0000\u0481\u047f\u0001\u0000"+ - "\u0000\u0000\u0481\u0480\u0001\u0000\u0000\u0000\u0482\u0483\u0001\u0000"+ - "\u0000\u0000\u0483\u0481\u0001\u0000\u0000\u0000\u0483\u0484\u0001\u0000"+ - "\u0000\u0000\u0484\u00f7\u0001\u0000\u0000\u0000\u0485\u0486\u0003B\u0019"+ - "\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006t\u000b\u0000"+ - "\u0488\u00f9\u0001\u0000\u0000\u0000\u0489\u048a\u0003D\u001a\u0000\u048a"+ - "\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006u\u000b\u0000\u048c\u00fb"+ - "\u0001\u0000\u0000\u0000\u048d\u048e\u0003F\u001b\u0000\u048e\u048f\u0001"+ - "\u0000\u0000\u0000\u048f\u0490\u0006v\u000b\u0000\u0490\u00fd\u0001\u0000"+ - "\u0000\u0000\u0491\u0492\u0003J\u001d\u0000\u0492\u0493\u0001\u0000\u0000"+ - "\u0000\u0493\u0494\u0006w\u0011\u0000\u0494\u0495\u0006w\f\u0000\u0495"+ - "\u00ff\u0001\u0000\u0000\u0000\u0496\u0497\u0003l.\u0000\u0497\u0498\u0001"+ - "\u0000\u0000\u0000\u0498\u0499\u0006x\u0014\u0000\u0499\u0101\u0001\u0000"+ - "\u0000\u0000\u049a\u049b\u0003p0\u0000\u049b\u049c\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0006y\u0013\u0000\u049d\u0103\u0001\u0000\u0000\u0000\u049e"+ - "\u049f\u0003t2\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006"+ - "z\u0017\u0000\u04a1\u0105\u0001\u0000\u0000\u0000\u04a2\u04a3\u0004{\u000b"+ - "\u0000\u04a3\u04a4\u0003\u008c>\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0006{\u0018\u0000\u04a6\u0107\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0004|\f\u0000\u04a8\u04a9\u0003\u00b0P\u0000\u04a9\u04aa\u0001"+ - "\u0000\u0000\u0000\u04aa\u04ab\u0006|\u0019\u0000\u04ab\u0109\u0001\u0000"+ - "\u0000\u0000\u04ac\u04ad\u0007\f\u0000\u0000\u04ad\u04ae\u0007\u0002\u0000"+ - "\u0000\u04ae\u010b\u0001\u0000\u0000\u0000\u04af\u04b0\u0003\u00f6s\u0000"+ - "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006~\u001a\u0000\u04b2"+ - "\u010d\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003B\u0019\u0000\u04b4\u04b5"+ - "\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u007f\u000b\u0000\u04b6\u010f"+ - "\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003D\u001a\u0000\u04b8\u04b9\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0006\u0080\u000b\u0000\u04ba\u0111\u0001"+ - "\u0000\u0000\u0000\u04bb\u04bc\u0003F\u001b\u0000\u04bc\u04bd\u0001\u0000"+ - "\u0000\u0000\u04bd\u04be\u0006\u0081\u000b\u0000\u04be\u0113\u0001\u0000"+ - "\u0000\u0000\u04bf\u04c0\u0003J\u001d\u0000\u04c0\u04c1\u0001\u0000\u0000"+ - "\u0000\u04c1\u04c2\u0006\u0082\u0011\u0000\u04c2\u04c3\u0006\u0082\f\u0000"+ - "\u04c3\u0115\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003\u00b2Q\u0000\u04c5"+ - 
"\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u0083\u000f\u0000\u04c7"+ - "\u04c8\u0006\u0083\u001b\u0000\u04c8\u0117\u0001\u0000\u0000\u0000\u04c9"+ - "\u04ca\u0007\u0007\u0000\u0000\u04ca\u04cb\u0007\t\u0000\u0000\u04cb\u04cc"+ - "\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000\u04cd\u0119"+ - "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0007\u0013\u0000\u0000\u04cf\u04d0"+ - "\u0007\u0001\u0000\u0000\u04d0\u04d1\u0007\u0005\u0000\u0000\u04d1\u04d2"+ - "\u0007\n\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ - "\u0085\u001c\u0000\u04d4\u011b\u0001\u0000\u0000\u0000\u04d5\u04d6\b#"+ - "\u0000\u0000\u04d6\u011d\u0001\u0000\u0000\u0000\u04d7\u04d9\u0003\u011c"+ - "\u0086\u0000\u04d8\u04d7\u0001\u0000\u0000\u0000\u04d9\u04da\u0001\u0000"+ - "\u0000\u0000\u04da\u04d8\u0001\u0000\u0000\u0000\u04da\u04db\u0001\u0000"+ - "\u0000\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0003H\u001c"+ - "\u0000\u04dd\u04df\u0001\u0000\u0000\u0000\u04de\u04d8\u0001\u0000\u0000"+ - "\u0000\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e0\u04e2\u0003\u011c\u0086\u0000\u04e1\u04e0\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u011f\u0001\u0000\u0000"+ - "\u0000\u04e5\u04e6\u0003\u011e\u0087\u0000\u04e6\u04e7\u0001\u0000\u0000"+ - "\u0000\u04e7\u04e8\u0006\u0088\u001d\u0000\u04e8\u0121\u0001\u0000\u0000"+ - "\u0000\u04e9\u04ea\u0003B\u0019\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ - "\u04eb\u04ec\u0006\u0089\u000b\u0000\u04ec\u0123\u0001\u0000\u0000\u0000"+ - "\u04ed\u04ee\u0003D\u001a\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ - "\u04f0\u0006\u008a\u000b\u0000\u04f0\u0125\u0001\u0000\u0000\u0000\u04f1"+ - "\u04f2\u0003F\u001b\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4"+ - "\u0006\u008b\u000b\u0000\u04f4\u0127\u0001\u0000\u0000\u0000\u04f5\u04f6"+ - "\u0003J\u001d\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006"+ - "\u008c\u0011\u0000\u04f8\u04f9\u0006\u008c\f\u0000\u04f9\u04fa\u0006\u008c"+ - "\f\u0000\u04fa\u0129\u0001\u0000\u0000\u0000\u04fb\u04fc\u0003l.\u0000"+ - "\u04fc\u04fd\u0001\u0000\u0000\u0000\u04fd\u04fe\u0006\u008d\u0014\u0000"+ - "\u04fe\u012b\u0001\u0000\u0000\u0000\u04ff\u0500\u0003p0\u0000\u0500\u0501"+ - "\u0001\u0000\u0000\u0000\u0501\u0502\u0006\u008e\u0013\u0000\u0502\u012d"+ - "\u0001\u0000\u0000\u0000\u0503\u0504\u0003t2\u0000\u0504\u0505\u0001\u0000"+ - "\u0000\u0000\u0505\u0506\u0006\u008f\u0017\u0000\u0506\u012f\u0001\u0000"+ - "\u0000\u0000\u0507\u0508\u0003\u011a\u0085\u0000\u0508\u0509\u0001\u0000"+ - "\u0000\u0000\u0509\u050a\u0006\u0090\u001e\u0000\u050a\u0131\u0001\u0000"+ - "\u0000\u0000\u050b\u050c\u0003\u00f6s\u0000\u050c\u050d\u0001\u0000\u0000"+ - "\u0000\u050d\u050e\u0006\u0091\u001a\u0000\u050e\u0133\u0001\u0000\u0000"+ - "\u0000\u050f\u0510\u0003\u00baU\u0000\u0510\u0511\u0001\u0000\u0000\u0000"+ - "\u0511\u0512\u0006\u0092\u001f\u0000\u0512\u0135\u0001\u0000\u0000\u0000"+ - "\u0513\u0514\u0004\u0093\r\u0000\u0514\u0515\u0003\u008c>\u0000\u0515"+ - "\u0516\u0001\u0000\u0000\u0000\u0516\u0517\u0006\u0093\u0018\u0000\u0517"+ - "\u0137\u0001\u0000\u0000\u0000\u0518\u0519\u0004\u0094\u000e\u0000\u0519"+ - "\u051a\u0003\u00b0P\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c"+ - "\u0006\u0094\u0019\u0000\u051c\u0139\u0001\u0000\u0000\u0000\u051d\u051e"+ - "\u0003B\u0019\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006"+ - 
"\u0095\u000b\u0000\u0520\u013b\u0001\u0000\u0000\u0000\u0521\u0522\u0003"+ - "D\u001a\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u0096"+ - "\u000b\u0000\u0524\u013d\u0001\u0000\u0000\u0000\u0525\u0526\u0003F\u001b"+ - "\u0000\u0526\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u0097\u000b"+ - "\u0000\u0528\u013f\u0001\u0000\u0000\u0000\u0529\u052a\u0003J\u001d\u0000"+ - "\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u052c\u0006\u0098\u0011\u0000"+ - "\u052c\u052d\u0006\u0098\f\u0000\u052d\u0141\u0001\u0000\u0000\u0000\u052e"+ - "\u052f\u0003t2\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006"+ - "\u0099\u0017\u0000\u0531\u0143\u0001\u0000\u0000\u0000\u0532\u0533\u0004"+ - "\u009a\u000f\u0000\u0533\u0534\u0003\u008c>\u0000\u0534\u0535\u0001\u0000"+ - "\u0000\u0000\u0535\u0536\u0006\u009a\u0018\u0000\u0536\u0145\u0001\u0000"+ - "\u0000\u0000\u0537\u0538\u0004\u009b\u0010\u0000\u0538\u0539\u0003\u00b0"+ - "P\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u009b\u0019"+ - "\u0000\u053b\u0147\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00baU\u0000"+ - "\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u009c\u001f\u0000"+ - "\u053f\u0149\u0001\u0000\u0000\u0000\u0540\u0541\u0003\u00b6S\u0000\u0541"+ - "\u0542\u0001\u0000\u0000\u0000\u0542\u0543\u0006\u009d \u0000\u0543\u014b"+ - "\u0001\u0000\u0000\u0000\u0544\u0545\u0003B\u0019\u0000\u0545\u0546\u0001"+ - "\u0000\u0000\u0000\u0546\u0547\u0006\u009e\u000b\u0000\u0547\u014d\u0001"+ - "\u0000\u0000\u0000\u0548\u0549\u0003D\u001a\u0000\u0549\u054a\u0001\u0000"+ - "\u0000\u0000\u054a\u054b\u0006\u009f\u000b\u0000\u054b\u014f\u0001\u0000"+ - "\u0000\u0000\u054c\u054d\u0003F\u001b\u0000\u054d\u054e\u0001\u0000\u0000"+ - "\u0000\u054e\u054f\u0006\u00a0\u000b\u0000\u054f\u0151\u0001\u0000\u0000"+ - "\u0000\u0550\u0551\u0003J\u001d\u0000\u0551\u0552\u0001\u0000\u0000\u0000"+ - "\u0552\u0553\u0006\u00a1\u0011\u0000\u0553\u0554\u0006\u00a1\f\u0000\u0554"+ - "\u0153\u0001\u0000\u0000\u0000\u0555\u0556\u0007\u0001\u0000\u0000\u0556"+ - "\u0557\u0007\t\u0000\u0000\u0557\u0558\u0007\u000f\u0000\u0000\u0558\u0559"+ - "\u0007\u0007\u0000\u0000\u0559\u0155\u0001\u0000\u0000\u0000\u055a\u055b"+ - "\u0003B\u0019\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006"+ - "\u00a3\u000b\u0000\u055d\u0157\u0001\u0000\u0000\u0000\u055e\u055f\u0003"+ - "D\u001a\u0000\u055f\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00a4"+ - "\u000b\u0000\u0561\u0159\u0001\u0000\u0000\u0000\u0562\u0563\u0003F\u001b"+ - "\u0000\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00a5\u000b"+ - "\u0000\u0565\u015b\u0001\u0000\u0000\u0000\u0566\u0567\u0003\u00b4R\u0000"+ - "\u0567\u0568\u0001\u0000\u0000\u0000\u0568\u0569\u0006\u00a6\u0012\u0000"+ - "\u0569\u056a\u0006\u00a6\f\u0000\u056a\u015d\u0001\u0000\u0000\u0000\u056b"+ - "\u056c\u0003H\u001c\u0000\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e"+ - "\u0006\u00a7\r\u0000\u056e\u015f\u0001\u0000\u0000\u0000\u056f\u0575\u0003"+ - "V#\u0000\u0570\u0575\u0003L\u001e\u0000\u0571\u0575\u0003t2\u0000\u0572"+ - "\u0575\u0003N\u001f\u0000\u0573\u0575\u0003\\&\u0000\u0574\u056f\u0001"+ - "\u0000\u0000\u0000\u0574\u0570\u0001\u0000\u0000\u0000\u0574\u0571\u0001"+ - "\u0000\u0000\u0000\u0574\u0572\u0001\u0000\u0000\u0000\u0574\u0573\u0001"+ - "\u0000\u0000\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0574\u0001"+ - "\u0000\u0000\u0000\u0576\u0577\u0001\u0000\u0000\u0000\u0577\u0161\u0001"+ - "\u0000\u0000\u0000\u0578\u0579\u0003B\u0019\u0000\u0579\u057a\u0001\u0000"+ - 
"\u0000\u0000\u057a\u057b\u0006\u00a9\u000b\u0000\u057b\u0163\u0001\u0000"+ - "\u0000\u0000\u057c\u057d\u0003D\u001a\u0000\u057d\u057e\u0001\u0000\u0000"+ - "\u0000\u057e\u057f\u0006\u00aa\u000b\u0000\u057f\u0165\u0001\u0000\u0000"+ - "\u0000\u0580\u0581\u0003F\u001b\u0000\u0581\u0582\u0001\u0000\u0000\u0000"+ - "\u0582\u0583\u0006\u00ab\u000b\u0000\u0583\u0167\u0001\u0000\u0000\u0000"+ - "\u0584\u0585\u0003J\u001d\u0000\u0585\u0586\u0001\u0000\u0000\u0000\u0586"+ - "\u0587\u0006\u00ac\u0011\u0000\u0587\u0588\u0006\u00ac\f\u0000\u0588\u0169"+ - "\u0001\u0000\u0000\u0000\u0589\u058a\u0003H\u001c\u0000\u058a\u058b\u0001"+ - "\u0000\u0000\u0000\u058b\u058c\u0006\u00ad\r\u0000\u058c\u016b\u0001\u0000"+ - "\u0000\u0000\u058d\u058e\u0003p0\u0000\u058e\u058f\u0001\u0000\u0000\u0000"+ - "\u058f\u0590\u0006\u00ae\u0013\u0000\u0590\u016d\u0001\u0000\u0000\u0000"+ - "\u0591\u0592\u0003t2\u0000\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594"+ - "\u0006\u00af\u0017\u0000\u0594\u016f\u0001\u0000\u0000\u0000\u0595\u0596"+ - "\u0003\u0118\u0084\u0000\u0596\u0597\u0001\u0000\u0000\u0000\u0597\u0598"+ - "\u0006\u00b0!\u0000\u0598\u0599\u0006\u00b0\"\u0000\u0599\u0171\u0001"+ - "\u0000\u0000\u0000\u059a\u059b\u0003\u00dcf\u0000\u059b\u059c\u0001\u0000"+ - "\u0000\u0000\u059c\u059d\u0006\u00b1\u0015\u0000\u059d\u0173\u0001\u0000"+ - "\u0000\u0000\u059e\u059f\u0003`(\u0000\u059f\u05a0\u0001\u0000\u0000\u0000"+ - "\u05a0\u05a1\u0006\u00b2\u0016\u0000\u05a1\u0175\u0001\u0000\u0000\u0000"+ - "\u05a2\u05a3\u0003B\u0019\u0000\u05a3\u05a4\u0001\u0000\u0000\u0000\u05a4"+ - "\u05a5\u0006\u00b3\u000b\u0000\u05a5\u0177\u0001\u0000\u0000\u0000\u05a6"+ - "\u05a7\u0003D\u001a\u0000\u05a7\u05a8\u0001\u0000\u0000\u0000\u05a8\u05a9"+ - "\u0006\u00b4\u000b\u0000\u05a9\u0179\u0001\u0000\u0000\u0000\u05aa\u05ab"+ - "\u0003F\u001b\u0000\u05ab\u05ac\u0001\u0000\u0000\u0000\u05ac\u05ad\u0006"+ - "\u00b5\u000b\u0000\u05ad\u017b\u0001\u0000\u0000\u0000\u05ae\u05af\u0003"+ - "J\u001d\u0000\u05af\u05b0\u0001\u0000\u0000\u0000\u05b0\u05b1\u0006\u00b6"+ - "\u0011\u0000\u05b1\u05b2\u0006\u00b6\f\u0000\u05b2\u05b3\u0006\u00b6\f"+ - "\u0000\u05b3\u017d\u0001\u0000\u0000\u0000\u05b4\u05b5\u0003p0\u0000\u05b5"+ - "\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00b7\u0013\u0000\u05b7"+ - "\u017f\u0001\u0000\u0000\u0000\u05b8\u05b9\u0003t2\u0000\u05b9\u05ba\u0001"+ - "\u0000\u0000\u0000\u05ba\u05bb\u0006\u00b8\u0017\u0000\u05bb\u0181\u0001"+ - "\u0000\u0000\u0000\u05bc\u05bd\u0003\u00f6s\u0000\u05bd\u05be\u0001\u0000"+ - "\u0000\u0000\u05be\u05bf\u0006\u00b9\u001a\u0000\u05bf\u0183\u0001\u0000"+ - "\u0000\u0000\u05c0\u05c1\u0003B\u0019\u0000\u05c1\u05c2\u0001\u0000\u0000"+ - "\u0000\u05c2\u05c3\u0006\u00ba\u000b\u0000\u05c3\u0185\u0001\u0000\u0000"+ - "\u0000\u05c4\u05c5\u0003D\u001a\u0000\u05c5\u05c6\u0001\u0000\u0000\u0000"+ - "\u05c6\u05c7\u0006\u00bb\u000b\u0000\u05c7\u0187\u0001\u0000\u0000\u0000"+ - "\u05c8\u05c9\u0003F\u001b\u0000\u05c9\u05ca\u0001\u0000\u0000\u0000\u05ca"+ - "\u05cb\u0006\u00bc\u000b\u0000\u05cb\u0189\u0001\u0000\u0000\u0000\u05cc"+ - "\u05cd\u0003J\u001d\u0000\u05cd\u05ce\u0001\u0000\u0000\u0000\u05ce\u05cf"+ - "\u0006\u00bd\u0011\u0000\u05cf\u05d0\u0006\u00bd\f\u0000\u05d0\u018b\u0001"+ - "\u0000\u0000\u0000\u05d1\u05d2\u00036\u0013\u0000\u05d2\u05d3\u0001\u0000"+ - "\u0000\u0000\u05d3\u05d4\u0006\u00be#\u0000\u05d4\u018d\u0001\u0000\u0000"+ - "\u0000\u05d5\u05d6\u0003\u010a}\u0000\u05d6\u05d7\u0001\u0000\u0000\u0000"+ - "\u05d7\u05d8\u0006\u00bf$\u0000\u05d8\u018f\u0001\u0000\u0000\u0000\u05d9"+ - 
"\u05da\u0003\u0118\u0084\u0000\u05da\u05db\u0001\u0000\u0000\u0000\u05db"+ - "\u05dc\u0006\u00c0!\u0000\u05dc\u05dd\u0006\u00c0\f\u0000\u05dd\u05de"+ - "\u0006\u00c0\u0000\u0000\u05de\u0191\u0001\u0000\u0000\u0000\u05df\u05e0"+ - "\u0007\u0014\u0000\u0000\u05e0\u05e1\u0007\u0002\u0000\u0000\u05e1\u05e2"+ - "\u0007\u0001\u0000\u0000\u05e2\u05e3\u0007\t\u0000\u0000\u05e3\u05e4\u0007"+ - "\u0011\u0000\u0000\u05e4\u05e5\u0001\u0000\u0000\u0000\u05e5\u05e6\u0006"+ - "\u00c1\f\u0000\u05e6\u05e7\u0006\u00c1\u0000\u0000\u05e7\u0193\u0001\u0000"+ - "\u0000\u0000\u05e8\u05e9\u0003\u00b6S\u0000\u05e9\u05ea\u0001\u0000\u0000"+ - "\u0000\u05ea\u05eb\u0006\u00c2 \u0000\u05eb\u0195\u0001\u0000\u0000\u0000"+ - "\u05ec\u05ed\u0003\u00baU\u0000\u05ed\u05ee\u0001\u0000\u0000\u0000\u05ee"+ - "\u05ef\u0006\u00c3\u001f\u0000\u05ef\u0197\u0001\u0000\u0000\u0000\u05f0"+ - "\u05f1\u0003B\u0019\u0000\u05f1\u05f2\u0001\u0000\u0000\u0000\u05f2\u05f3"+ - "\u0006\u00c4\u000b\u0000\u05f3\u0199\u0001\u0000\u0000\u0000\u05f4\u05f5"+ - "\u0003D\u001a\u0000\u05f5\u05f6\u0001\u0000\u0000\u0000\u05f6\u05f7\u0006"+ - "\u00c5\u000b\u0000\u05f7\u019b\u0001\u0000\u0000\u0000\u05f8\u05f9\u0003"+ - "F\u001b\u0000\u05f9\u05fa\u0001\u0000\u0000\u0000\u05fa\u05fb\u0006\u00c6"+ - "\u000b\u0000\u05fb\u019d\u0001\u0000\u0000\u0000\u05fc\u05fd\u0003J\u001d"+ - "\u0000\u05fd\u05fe\u0001\u0000\u0000\u0000\u05fe\u05ff\u0006\u00c7\u0011"+ - "\u0000\u05ff\u0600\u0006\u00c7\f\u0000\u0600\u019f\u0001\u0000\u0000\u0000"+ - "\u0601\u0602\u0003\u00dcf\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603"+ - "\u0604\u0006\u00c8\u0015\u0000\u0604\u0605\u0006\u00c8\f\u0000\u0605\u0606"+ - "\u0006\u00c8%\u0000\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003"+ - "`(\u0000\u0608\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u0016"+ - "\u0000\u060a\u060b\u0006\u00c9\f\u0000\u060b\u060c\u0006\u00c9%\u0000"+ - "\u060c\u01a3\u0001\u0000\u0000\u0000\u060d\u060e\u0003B\u0019\u0000\u060e"+ - "\u060f\u0001\u0000\u0000\u0000\u060f\u0610\u0006\u00ca\u000b\u0000\u0610"+ - "\u01a5\u0001\u0000\u0000\u0000\u0611\u0612\u0003D\u001a\u0000\u0612\u0613"+ - "\u0001\u0000\u0000\u0000\u0613\u0614\u0006\u00cb\u000b\u0000\u0614\u01a7"+ - "\u0001\u0000\u0000\u0000\u0615\u0616\u0003F\u001b\u0000\u0616\u0617\u0001"+ - "\u0000\u0000\u0000\u0617\u0618\u0006\u00cc\u000b\u0000\u0618\u01a9\u0001"+ - "\u0000\u0000\u0000\u0619\u061a\u0003H\u001c\u0000\u061a\u061b\u0001\u0000"+ - "\u0000\u0000\u061b\u061c\u0006\u00cd\r\u0000\u061c\u061d\u0006\u00cd\f"+ - "\u0000\u061d\u061e\u0006\u00cd\t\u0000\u061e\u01ab\u0001\u0000\u0000\u0000"+ - "\u061f\u0620\u0003p0\u0000\u0620\u0621\u0001\u0000\u0000\u0000\u0621\u0622"+ - "\u0006\u00ce\u0013\u0000\u0622\u0623\u0006\u00ce\f\u0000\u0623\u0624\u0006"+ - "\u00ce\t\u0000\u0624\u01ad\u0001\u0000\u0000\u0000\u0625\u0626\u0003B"+ - "\u0019\u0000\u0626\u0627\u0001\u0000\u0000\u0000\u0627\u0628\u0006\u00cf"+ - "\u000b\u0000\u0628\u01af\u0001\u0000\u0000\u0000\u0629\u062a\u0003D\u001a"+ - "\u0000\u062a\u062b\u0001\u0000\u0000\u0000\u062b\u062c\u0006\u00d0\u000b"+ - "\u0000\u062c\u01b1\u0001\u0000\u0000\u0000\u062d\u062e\u0003F\u001b\u0000"+ - "\u062e\u062f\u0001\u0000\u0000\u0000\u062f\u0630\u0006\u00d1\u000b\u0000"+ - "\u0630\u01b3\u0001\u0000\u0000\u0000\u0631\u0632\u0003\u00baU\u0000\u0632"+ - "\u0633\u0001\u0000\u0000\u0000\u0633\u0634\u0006\u00d2\f\u0000\u0634\u0635"+ - "\u0006\u00d2\u0000\u0000\u0635\u0636\u0006\u00d2\u001f\u0000\u0636\u01b5"+ - "\u0001\u0000\u0000\u0000\u0637\u0638\u0003\u00b6S\u0000\u0638\u0639\u0001"+ - 
"\u0000\u0000\u0000\u0639\u063a\u0006\u00d3\f\u0000\u063a\u063b\u0006\u00d3"+ - "\u0000\u0000\u063b\u063c\u0006\u00d3 \u0000\u063c\u01b7\u0001\u0000\u0000"+ - "\u0000\u063d\u063e\u0003f+\u0000\u063e\u063f\u0001\u0000\u0000\u0000\u063f"+ - "\u0640\u0006\u00d4\f\u0000\u0640\u0641\u0006\u00d4\u0000\u0000\u0641\u0642"+ - "\u0006\u00d4&\u0000\u0642\u01b9\u0001\u0000\u0000\u0000\u0643\u0644\u0003"+ - "J\u001d\u0000\u0644\u0645\u0001\u0000\u0000\u0000\u0645\u0646\u0006\u00d5"+ - "\u0011\u0000\u0646\u0647\u0006\u00d5\f\u0000\u0647\u01bb\u0001\u0000\u0000"+ + "k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0003p\u0462\bp\u0001q\u0001q\u0003q\u0466"+ + "\bq\u0001q\u0005q\u0469\bq\nq\fq\u046c\tq\u0001q\u0001q\u0003q\u0470\b"+ + "q\u0001q\u0004q\u0473\bq\u000bq\fq\u0474\u0003q\u0477\bq\u0001r\u0001"+ + "r\u0004r\u047b\br\u000br\fr\u047c\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ + "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0004"+ + "\u0086\u04d2\b\u0086\u000b\u0086\f\u0086\u04d3\u0001\u0086\u0001\u0086"+ + "\u0003\u0086\u04d8\b\u0086\u0001\u0086\u0004\u0086\u04db\b\u0086\u000b"+ + "\u0086\f\u0086\u04dc\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ + "\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001"+ + "\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001"+ + 
"\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ + "\u00a7\u0004\u00a7\u056e\b\u00a7\u000b\u00a7\f\u00a7\u056f\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7"+ + "\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0002\u02ab\u02f0\u0000\u00d5\u0010"+ + "\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c"+ + "\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u0012"+ + "4\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001c"+ + "H\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000"+ + "\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ + 
"1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ + ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac"+ + "\u0000\u00aeE\u00b0F\u00b2G\u00b4H\u00b6\u0000\u00b8I\u00baJ\u00bcK\u00be"+ + "L\u00c0\u0000\u00c2\u0000\u00c4M\u00c6N\u00c8O\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6P\u00d8\u0000\u00da"+ + "Q\u00dc\u0000\u00de\u0000\u00e0R\u00e2S\u00e4T\u00e6\u0000\u00e8\u0000"+ + "\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4U\u00f6"+ + "V\u00f8W\u00faX\u00fc\u0000\u00fe\u0000\u0100\u0000\u0102\u0000\u0104"+ + "\u0000\u0106\u0000\u0108Y\u010a\u0000\u010cZ\u010e[\u0110\\\u0112\u0000"+ + "\u0114\u0000\u0116]\u0118^\u011a\u0000\u011c_\u011e\u0000\u0120`\u0122"+ + "a\u0124b\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ + "\u0000\u0132\u0000\u0134\u0000\u0136\u0000\u0138c\u013ad\u013ce\u013e"+ + "\u0000\u0140\u0000\u0142\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a"+ + "f\u014cg\u014eh\u0150\u0000\u0152i\u0154j\u0156k\u0158l\u015a\u0000\u015c"+ + "\u0000\u015em\u0160n\u0162o\u0164p\u0166\u0000\u0168\u0000\u016a\u0000"+ + "\u016c\u0000\u016e\u0000\u0170\u0000\u0172\u0000\u0174q\u0176r\u0178s"+ + "\u017a\u0000\u017c\u0000\u017e\u0000\u0180\u0000\u0182t\u0184u\u0186v"+ + "\u0188\u0000\u018a\u0000\u018c\u0000\u018e\u0000\u0190w\u0192\u0000\u0194"+ + "\u0000\u0196x\u0198y\u019az\u019c\u0000\u019e\u0000\u01a0\u0000\u01a2"+ + "{\u01a4|\u01a6}\u01a8\u0000\u01aa\u0000\u01ac~\u01ae\u007f\u01b0\u0080"+ + "\u01b2\u0000\u01b4\u0000\u01b6\u0000\u01b8\u0000\u0010\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000"+ + "DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ + "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ + "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r"+ + "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ + "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ + "//::<<>?\\\\||\u065c\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"+ + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"+ + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"+ + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0001H"+ + "\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000"+ + "\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000"+ 
+ "\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j"+ + "\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000"+ + "\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000"+ + "\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x"+ + "\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000"+ + "\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000"+ + "\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000"+ + "\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000"+ + "\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000"+ + "\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000"+ + "\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000"+ + "\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000"+ + "\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000"+ + "\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000"+ + "\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000"+ + "\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000"+ + "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ + "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ + "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ + "\u0000\u0001\u00b8\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000"+ + "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ + "\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000"+ + "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ + "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000"+ + "\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ + "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ + "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ + "\u0000\u0003\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000"+ + "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ + "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ + "\u0000\u0004\u00e6\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000"+ + "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ + "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f4\u0001\u0000\u0000"+ + "\u0000\u0004\u00f6\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ + "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000"+ + "\u0000\u0005\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ + "\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ + "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ + "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ + "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ + "\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000"+ + "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ + "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + "\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000"+ + 
"\u0000\u0007\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ + "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ + "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ + "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ + "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ + "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\b\u013e\u0001\u0000\u0000\u0000"+ + "\b\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144"+ + "\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001"+ + "\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000"+ + "\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\t\u0150\u0001\u0000\u0000"+ + "\u0000\t\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000"+ + "\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\n\u015a"+ + "\u0001\u0000\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001"+ + "\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000"+ + "\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\u000b\u0166\u0001\u0000\u0000"+ + "\u0000\u000b\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ + "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ + "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ + "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ + "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\f\u017a\u0001\u0000\u0000\u0000"+ + "\f\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180"+ + "\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001"+ + "\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\r\u0188\u0001\u0000"+ + "\u0000\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000"+ + "\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000"+ + "\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196"+ + "\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001"+ + "\u0000\u0000\u0000\u000e\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001"+ + "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ + "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ + "\u0000\u0000\u0000\u000f\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001"+ + "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ + "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ + "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ + "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u0010\u01ba\u0001"+ + "\u0000\u0000\u0000\u0012\u01c4\u0001\u0000\u0000\u0000\u0014\u01cb\u0001"+ + "\u0000\u0000\u0000\u0016\u01d4\u0001\u0000\u0000\u0000\u0018\u01db\u0001"+ + "\u0000\u0000\u0000\u001a\u01e5\u0001\u0000\u0000\u0000\u001c\u01ec\u0001"+ + "\u0000\u0000\u0000\u001e\u01f3\u0001\u0000\u0000\u0000 \u01fa\u0001\u0000"+ + "\u0000\u0000\"\u0202\u0001\u0000\u0000\u0000$\u020e\u0001\u0000\u0000"+ + "\u0000&\u0217\u0001\u0000\u0000\u0000(\u021d\u0001\u0000\u0000\u0000*"+ + "\u0224\u0001\u0000\u0000\u0000,\u022b\u0001\u0000\u0000\u0000.\u0233\u0001"+ + "\u0000\u0000\u00000\u023b\u0001\u0000\u0000\u00002\u024a\u0001\u0000\u0000"+ + "\u00004\u0256\u0001\u0000\u0000\u00006\u0261\u0001\u0000\u0000\u00008"+ + "\u0269\u0001\u0000\u0000\u0000:\u0271\u0001\u0000\u0000\u0000<\u0279\u0001"+ + 
"\u0000\u0000\u0000>\u0282\u0001\u0000\u0000\u0000@\u028d\u0001\u0000\u0000"+ + "\u0000B\u0293\u0001\u0000\u0000\u0000D\u02a4\u0001\u0000\u0000\u0000F"+ + "\u02b4\u0001\u0000\u0000\u0000H\u02ba\u0001\u0000\u0000\u0000J\u02be\u0001"+ + "\u0000\u0000\u0000L\u02c0\u0001\u0000\u0000\u0000N\u02c2\u0001\u0000\u0000"+ + "\u0000P\u02c5\u0001\u0000\u0000\u0000R\u02c7\u0001\u0000\u0000\u0000T"+ + "\u02d0\u0001\u0000\u0000\u0000V\u02d2\u0001\u0000\u0000\u0000X\u02d7\u0001"+ + "\u0000\u0000\u0000Z\u02d9\u0001\u0000\u0000\u0000\\\u02de\u0001\u0000"+ + "\u0000\u0000^\u02fd\u0001\u0000\u0000\u0000`\u0300\u0001\u0000\u0000\u0000"+ + "b\u032e\u0001\u0000\u0000\u0000d\u0330\u0001\u0000\u0000\u0000f\u0333"+ + "\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000j\u033b\u0001\u0000"+ + "\u0000\u0000l\u033d\u0001\u0000\u0000\u0000n\u0340\u0001\u0000\u0000\u0000"+ + "p\u0342\u0001\u0000\u0000\u0000r\u0344\u0001\u0000\u0000\u0000t\u0349"+ + "\u0001\u0000\u0000\u0000v\u034b\u0001\u0000\u0000\u0000x\u0351\u0001\u0000"+ + "\u0000\u0000z\u0357\u0001\u0000\u0000\u0000|\u035a\u0001\u0000\u0000\u0000"+ + "~\u035d\u0001\u0000\u0000\u0000\u0080\u0362\u0001\u0000\u0000\u0000\u0082"+ + "\u0367\u0001\u0000\u0000\u0000\u0084\u0369\u0001\u0000\u0000\u0000\u0086"+ + "\u036d\u0001\u0000\u0000\u0000\u0088\u0372\u0001\u0000\u0000\u0000\u008a"+ + "\u0378\u0001\u0000\u0000\u0000\u008c\u037b\u0001\u0000\u0000\u0000\u008e"+ + "\u037d\u0001\u0000\u0000\u0000\u0090\u0383\u0001\u0000\u0000\u0000\u0092"+ + "\u0385\u0001\u0000\u0000\u0000\u0094\u038a\u0001\u0000\u0000\u0000\u0096"+ + "\u038d\u0001\u0000\u0000\u0000\u0098\u0390\u0001\u0000\u0000\u0000\u009a"+ + "\u0393\u0001\u0000\u0000\u0000\u009c\u0395\u0001\u0000\u0000\u0000\u009e"+ + "\u0398\u0001\u0000\u0000\u0000\u00a0\u039a\u0001\u0000\u0000\u0000\u00a2"+ + "\u039d\u0001\u0000\u0000\u0000\u00a4\u039f\u0001\u0000\u0000\u0000\u00a6"+ + "\u03a1\u0001\u0000\u0000\u0000\u00a8\u03a3\u0001\u0000\u0000\u0000\u00aa"+ + "\u03a5\u0001\u0000\u0000\u0000\u00ac\u03a7\u0001\u0000\u0000\u0000\u00ae"+ + "\u03bc\u0001\u0000\u0000\u0000\u00b0\u03be\u0001\u0000\u0000\u0000\u00b2"+ + "\u03c3\u0001\u0000\u0000\u0000\u00b4\u03d8\u0001\u0000\u0000\u0000\u00b6"+ + "\u03da\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ + "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03e8\u0001\u0000\u0000\u0000\u00be"+ + "\u03ec\u0001\u0000\u0000\u0000\u00c0\u03f0\u0001\u0000\u0000\u0000\u00c2"+ + "\u03f5\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ + "\u03fe\u0001\u0000\u0000\u0000\u00c8\u0402\u0001\u0000\u0000\u0000\u00ca"+ + "\u0406\u0001\u0000\u0000\u0000\u00cc\u040b\u0001\u0000\u0000\u0000\u00ce"+ + "\u040f\u0001\u0000\u0000\u0000\u00d0\u0413\u0001\u0000\u0000\u0000\u00d2"+ + "\u0417\u0001\u0000\u0000\u0000\u00d4\u041b\u0001\u0000\u0000\u0000\u00d6"+ + "\u041f\u0001\u0000\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da"+ + "\u042e\u0001\u0000\u0000\u0000\u00dc\u0432\u0001\u0000\u0000\u0000\u00de"+ + "\u0436\u0001\u0000\u0000\u0000\u00e0\u043a\u0001\u0000\u0000\u0000\u00e2"+ + "\u043e\u0001\u0000\u0000\u0000\u00e4\u0442\u0001\u0000\u0000\u0000\u00e6"+ + "\u0446\u0001\u0000\u0000\u0000\u00e8\u044b\u0001\u0000\u0000\u0000\u00ea"+ + "\u044f\u0001\u0000\u0000\u0000\u00ec\u0453\u0001\u0000\u0000\u0000\u00ee"+ + "\u0458\u0001\u0000\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2"+ + "\u0476\u0001\u0000\u0000\u0000\u00f4\u047a\u0001\u0000\u0000\u0000\u00f6"+ + "\u047e\u0001\u0000\u0000\u0000\u00f8\u0482\u0001\u0000\u0000\u0000\u00fa"+ + 
"\u0486\u0001\u0000\u0000\u0000\u00fc\u048a\u0001\u0000\u0000\u0000\u00fe"+ + "\u048f\u0001\u0000\u0000\u0000\u0100\u0493\u0001\u0000\u0000\u0000\u0102"+ + "\u0497\u0001\u0000\u0000\u0000\u0104\u049b\u0001\u0000\u0000\u0000\u0106"+ + "\u04a0\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ + "\u04a8\u0001\u0000\u0000\u0000\u010c\u04ac\u0001\u0000\u0000\u0000\u010e"+ + "\u04b0\u0001\u0000\u0000\u0000\u0110\u04b4\u0001\u0000\u0000\u0000\u0112"+ + "\u04b8\u0001\u0000\u0000\u0000\u0114\u04bd\u0001\u0000\u0000\u0000\u0116"+ + "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ + "\u04ce\u0001\u0000\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e"+ + "\u04de\u0001\u0000\u0000\u0000\u0120\u04e2\u0001\u0000\u0000\u0000\u0122"+ + "\u04e6\u0001\u0000\u0000\u0000\u0124\u04ea\u0001\u0000\u0000\u0000\u0126"+ + "\u04ee\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ + "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fc\u0001\u0000\u0000\u0000\u012e"+ + "\u0500\u0001\u0000\u0000\u0000\u0130\u0504\u0001\u0000\u0000\u0000\u0132"+ + "\u0508\u0001\u0000\u0000\u0000\u0134\u050c\u0001\u0000\u0000\u0000\u0136"+ + "\u0511\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ + "\u051a\u0001\u0000\u0000\u0000\u013c\u051e\u0001\u0000\u0000\u0000\u013e"+ + "\u0522\u0001\u0000\u0000\u0000\u0140\u0527\u0001\u0000\u0000\u0000\u0142"+ + "\u052b\u0001\u0000\u0000\u0000\u0144\u0530\u0001\u0000\u0000\u0000\u0146"+ + "\u0535\u0001\u0000\u0000\u0000\u0148\u0539\u0001\u0000\u0000\u0000\u014a"+ + "\u053d\u0001\u0000\u0000\u0000\u014c\u0541\u0001\u0000\u0000\u0000\u014e"+ + "\u0545\u0001\u0000\u0000\u0000\u0150\u0549\u0001\u0000\u0000\u0000\u0152"+ + "\u054e\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ + "\u0557\u0001\u0000\u0000\u0000\u0158\u055b\u0001\u0000\u0000\u0000\u015a"+ + "\u055f\u0001\u0000\u0000\u0000\u015c\u0564\u0001\u0000\u0000\u0000\u015e"+ + "\u056d\u0001\u0000\u0000\u0000\u0160\u0571\u0001\u0000\u0000\u0000\u0162"+ + "\u0575\u0001\u0000\u0000\u0000\u0164\u0579\u0001\u0000\u0000\u0000\u0166"+ + "\u057d\u0001\u0000\u0000\u0000\u0168\u0582\u0001\u0000\u0000\u0000\u016a"+ + "\u0586\u0001\u0000\u0000\u0000\u016c\u058a\u0001\u0000\u0000\u0000\u016e"+ + "\u058e\u0001\u0000\u0000\u0000\u0170\u0593\u0001\u0000\u0000\u0000\u0172"+ + "\u0597\u0001\u0000\u0000\u0000\u0174\u059b\u0001\u0000\u0000\u0000\u0176"+ + "\u059f\u0001\u0000\u0000\u0000\u0178\u05a3\u0001\u0000\u0000\u0000\u017a"+ + "\u05a7\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ + "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b5\u0001\u0000\u0000\u0000\u0182"+ + "\u05b9\u0001\u0000\u0000\u0000\u0184\u05bd\u0001\u0000\u0000\u0000\u0186"+ + "\u05c1\u0001\u0000\u0000\u0000\u0188\u05c5\u0001\u0000\u0000\u0000\u018a"+ + "\u05ca\u0001\u0000\u0000\u0000\u018c\u05ce\u0001\u0000\u0000\u0000\u018e"+ + "\u05d2\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ + "\u05e1\u0001\u0000\u0000\u0000\u0194\u05e5\u0001\u0000\u0000\u0000\u0196"+ + "\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001\u0000\u0000\u0000\u019a"+ + "\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001\u0000\u0000\u0000\u019e"+ + "\u05fa\u0001\u0000\u0000\u0000\u01a0\u0600\u0001\u0000\u0000\u0000\u01a2"+ + "\u0606\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ + "\u060e\u0001\u0000\u0000\u0000\u01a8\u0612\u0001\u0000\u0000\u0000\u01aa"+ + "\u0618\u0001\u0000\u0000\u0000\u01ac\u061e\u0001\u0000\u0000\u0000\u01ae"+ + 
"\u0622\u0001\u0000\u0000\u0000\u01b0\u0626\u0001\u0000\u0000\u0000\u01b2"+ + "\u062a\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ + "\u0636\u0001\u0000\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba"+ + "\u01bb\u0007\u0000\u0000\u0000\u01bb\u01bc\u0007\u0001\u0000\u0000\u01bc"+ + "\u01bd\u0007\u0002\u0000\u0000\u01bd\u01be\u0007\u0002\u0000\u0000\u01be"+ + "\u01bf\u0007\u0003\u0000\u0000\u01bf\u01c0\u0007\u0004\u0000\u0000\u01c0"+ + "\u01c1\u0007\u0005\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c3\u0006\u0000\u0000\u0000\u01c3\u0011\u0001\u0000\u0000\u0000\u01c4"+ + "\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0006\u0000\u0000\u01c6"+ + "\u01c7\u0007\u0007\u0000\u0000\u01c7\u01c8\u0007\b\u0000\u0000\u01c8\u01c9"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006\u0001\u0001\u0000\u01ca\u0013"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007\u0003\u0000\u0000\u01cc\u01cd"+ + "\u0007\t\u0000\u0000\u01cd\u01ce\u0007\u0006\u0000\u0000\u01ce\u01cf\u0007"+ + "\u0001\u0000\u0000\u01cf\u01d0\u0007\u0004\u0000\u0000\u01d0\u01d1\u0007"+ + "\n\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0002"+ + "\u0002\u0000\u01d3\u0015\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0003"+ + "\u0000\u0000\u01d5\u01d6\u0007\u000b\u0000\u0000\u01d6\u01d7\u0007\f\u0000"+ + "\u0000\u01d7\u01d8\u0007\r\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ + "\u01d9\u01da\u0006\u0003\u0000\u0000\u01da\u0017\u0001\u0000\u0000\u0000"+ + "\u01db\u01dc\u0007\u0003\u0000\u0000\u01dc\u01dd\u0007\u000e\u0000\u0000"+ + "\u01dd\u01de\u0007\b\u0000\u0000\u01de\u01df\u0007\r\u0000\u0000\u01df"+ + "\u01e0\u0007\f\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2"+ + "\u0007\t\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ + "\u0004\u0003\u0000\u01e4\u0019\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007"+ + "\u000f\u0000\u0000\u01e6\u01e7\u0007\u0006\u0000\u0000\u01e7\u01e8\u0007"+ + "\u0007\u0000\u0000\u01e8\u01e9\u0007\u0010\u0000\u0000\u01e9\u01ea\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0006\u0005\u0004\u0000\u01eb\u001b\u0001"+ + "\u0000\u0000\u0000\u01ec\u01ed\u0007\u0011\u0000\u0000\u01ed\u01ee\u0007"+ + "\u0006\u0000\u0000\u01ee\u01ef\u0007\u0007\u0000\u0000\u01ef\u01f0\u0007"+ + "\u0012\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006"+ + "\u0006\u0000\u0000\u01f2\u001d\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007"+ + "\u0012\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000\u01f5\u01f6\u0007"+ + "\u0003\u0000\u0000\u01f6\u01f7\u0007\b\u0000\u0000\u01f7\u01f8\u0001\u0000"+ + "\u0000\u0000\u01f8\u01f9\u0006\u0007\u0001\u0000\u01f9\u001f\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0007\r\u0000\u0000\u01fb\u01fc\u0007\u0001\u0000"+ + "\u0000\u01fc\u01fd\u0007\u0010\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000"+ + "\u0000\u01fe\u01ff\u0007\u0005\u0000\u0000\u01ff\u0200\u0001\u0000\u0000"+ + "\u0000\u0200\u0201\u0006\b\u0000\u0000\u0201!\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0007\u0010\u0000\u0000\u0203\u0204\u0007\u000b\u0000\u0000"+ + "\u0204\u0205\u0005_\u0000\u0000\u0205\u0206\u0007\u0003\u0000\u0000\u0206"+ + "\u0207\u0007\u000e\u0000\u0000\u0207\u0208\u0007\b\u0000\u0000\u0208\u0209"+ + "\u0007\f\u0000\u0000\u0209\u020a\u0007\t\u0000\u0000\u020a\u020b\u0007"+ + "\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0006"+ + "\t\u0005\u0000\u020d#\u0001\u0000\u0000\u0000\u020e\u020f\u0007\u0006"+ + "\u0000\u0000\u020f\u0210\u0007\u0003\u0000\u0000\u0210\u0211\u0007\t\u0000"+ + 
"\u0000\u0211\u0212\u0007\f\u0000\u0000\u0212\u0213\u0007\u0010\u0000\u0000"+ + "\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000"+ + "\u0215\u0216\u0006\n\u0006\u0000\u0216%\u0001\u0000\u0000\u0000\u0217"+ + "\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0007\u0000\u0000\u0219"+ + "\u021a\u0007\u0013\u0000\u0000\u021a\u021b\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0006\u000b\u0000\u0000\u021c\'\u0001\u0000\u0000\u0000\u021d\u021e"+ + "\u0007\u0002\u0000\u0000\u021e\u021f\u0007\n\u0000\u0000\u021f\u0220\u0007"+ + "\u0007\u0000\u0000\u0220\u0221\u0007\u0013\u0000\u0000\u0221\u0222\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0006\f\u0007\u0000\u0223)\u0001\u0000"+ + "\u0000\u0000\u0224\u0225\u0007\u0002\u0000\u0000\u0225\u0226\u0007\u0007"+ + "\u0000\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227\u0228\u0007\u0005"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006\r\u0000"+ + "\u0000\u022a+\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0002\u0000\u0000"+ + "\u022c\u022d\u0007\u0005\u0000\u0000\u022d\u022e\u0007\f\u0000\u0000\u022e"+ + "\u022f\u0007\u0005\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000\u0230"+ + "\u0231\u0001\u0000\u0000\u0000\u0231\u0232\u0006\u000e\u0000\u0000\u0232"+ + "-\u0001\u0000\u0000\u0000\u0233\u0234\u0007\u0013\u0000\u0000\u0234\u0235"+ + "\u0007\n\u0000\u0000\u0235\u0236\u0007\u0003\u0000\u0000\u0236\u0237\u0007"+ + "\u0006\u0000\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0006\u000f\u0000\u0000\u023a/\u0001\u0000"+ + "\u0000\u0000\u023b\u023c\u0004\u0010\u0000\u0000\u023c\u023d\u0007\u0001"+ + "\u0000\u0000\u023d\u023e\u0007\t\u0000\u0000\u023e\u023f\u0007\r\u0000"+ + "\u0000\u023f\u0240\u0007\u0001\u0000\u0000\u0240\u0241\u0007\t\u0000\u0000"+ + "\u0241\u0242\u0007\u0003\u0000\u0000\u0242\u0243\u0007\u0002\u0000\u0000"+ + "\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245\u0007\f\u0000\u0000\u0245"+ + "\u0246\u0007\u0005\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000\u0247"+ + "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\u0010\u0000\u0000\u0249"+ + "1\u0001\u0000\u0000\u0000\u024a\u024b\u0004\u0011\u0001\u0000\u024b\u024c"+ + "\u0007\r\u0000\u0000\u024c\u024d\u0007\u0007\u0000\u0000\u024d\u024e\u0007"+ + "\u0007\u0000\u0000\u024e\u024f\u0007\u0012\u0000\u0000\u024f\u0250\u0007"+ + "\u0014\u0000\u0000\u0250\u0251\u0007\b\u0000\u0000\u0251\u0252\u0005_"+ + "\u0000\u0000\u0252\u0253\u0005\u8001\uf414\u0000\u0000\u0253\u0254\u0001"+ + "\u0000\u0000\u0000\u0254\u0255\u0006\u0011\b\u0000\u02553\u0001\u0000"+ + "\u0000\u0000\u0256\u0257\u0004\u0012\u0002\u0000\u0257\u0258\u0007\u0010"+ + "\u0000\u0000\u0258\u0259\u0007\u0003\u0000\u0000\u0259\u025a\u0007\u0005"+ + "\u0000\u0000\u025a\u025b\u0007\u0006\u0000\u0000\u025b\u025c\u0007\u0001"+ + "\u0000\u0000\u025c\u025d\u0007\u0004\u0000\u0000\u025d\u025e\u0007\u0002"+ + "\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0012"+ + "\t\u0000\u02605\u0001\u0000\u0000\u0000\u0261\u0262\u0004\u0013\u0003"+ + "\u0000\u0262\u0263\u0007\u0015\u0000\u0000\u0263\u0264\u0007\u0007\u0000"+ + "\u0000\u0264\u0265\u0007\u0001\u0000\u0000\u0265\u0266\u0007\t\u0000\u0000"+ + "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0013\n\u0000\u0268"+ + "7\u0001\u0000\u0000\u0000\u0269\u026a\u0004\u0014\u0004\u0000\u026a\u026b"+ + "\u0007\u000f\u0000\u0000\u026b\u026c\u0007\u0014\u0000\u0000\u026c\u026d"+ + "\u0007\r\u0000\u0000\u026d\u026e\u0007\r\u0000\u0000\u026e\u026f\u0001"+ + 
"\u0000\u0000\u0000\u026f\u0270\u0006\u0014\n\u0000\u02709\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0004\u0015\u0005\u0000\u0272\u0273\u0007\r\u0000"+ + "\u0000\u0273\u0274\u0007\u0003\u0000\u0000\u0274\u0275\u0007\u000f\u0000"+ + "\u0000\u0275\u0276\u0007\u0005\u0000\u0000\u0276\u0277\u0001\u0000\u0000"+ + "\u0000\u0277\u0278\u0006\u0015\n\u0000\u0278;\u0001\u0000\u0000\u0000"+ + "\u0279\u027a\u0004\u0016\u0006\u0000\u027a\u027b\u0007\u0006\u0000\u0000"+ + "\u027b\u027c\u0007\u0001\u0000\u0000\u027c\u027d\u0007\u0011\u0000\u0000"+ + "\u027d\u027e\u0007\n\u0000\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f"+ + "\u0280\u0001\u0000\u0000\u0000\u0280\u0281\u0006\u0016\n\u0000\u0281="+ + "\u0001\u0000\u0000\u0000\u0282\u0283\u0004\u0017\u0007\u0000\u0283\u0284"+ + "\u0007\r\u0000\u0000\u0284\u0285\u0007\u0007\u0000\u0000\u0285\u0286\u0007"+ + "\u0007\u0000\u0000\u0286\u0287\u0007\u0012\u0000\u0000\u0287\u0288\u0007"+ + "\u0014\u0000\u0000\u0288\u0289\u0007\b\u0000\u0000\u0289\u028a\u0001\u0000"+ + "\u0000\u0000\u028a\u028b\u0006\u0017\n\u0000\u028b?\u0001\u0000\u0000"+ + "\u0000\u028c\u028e\b\u0016\u0000\u0000\u028d\u028c\u0001\u0000\u0000\u0000"+ + "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000"+ + "\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ + "\u0291\u0292\u0006\u0018\u0000\u0000\u0292A\u0001\u0000\u0000\u0000\u0293"+ + "\u0294\u0005/\u0000\u0000\u0294\u0295\u0005/\u0000\u0000\u0295\u0299\u0001"+ + "\u0000\u0000\u0000\u0296\u0298\b\u0017\u0000\u0000\u0297\u0296\u0001\u0000"+ + "\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000"+ + "\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029d\u0001\u0000"+ + "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u029e\u0005\r\u0000"+ + "\u0000\u029d\u029c\u0001\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000"+ + "\u0000\u029e\u02a0\u0001\u0000\u0000\u0000\u029f\u02a1\u0005\n\u0000\u0000"+ + "\u02a0\u029f\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000"+ + "\u02a1\u02a2\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006\u0019\u000b\u0000"+ + "\u02a3C\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005/\u0000\u0000\u02a5\u02a6"+ + "\u0005*\u0000\u0000\u02a6\u02ab\u0001\u0000\u0000\u0000\u02a7\u02aa\u0003"+ + "D\u001a\u0000\u02a8\u02aa\t\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000"+ + "\u0000\u0000\u02a9\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ad\u0001\u0000"+ + "\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000"+ + "\u0000\u0000\u02ac\u02ae\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ + "\u0000\u0000\u02ae\u02af\u0005*\u0000\u0000\u02af\u02b0\u0005/\u0000\u0000"+ + "\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b2\u0006\u001a\u000b\u0000"+ + "\u02b2E\u0001\u0000\u0000\u0000\u02b3\u02b5\u0007\u0018\u0000\u0000\u02b4"+ + "\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6"+ + "\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7"+ + "\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b9\u0006\u001b\u000b\u0000\u02b9"+ + "G\u0001\u0000\u0000\u0000\u02ba\u02bb\u0005|\u0000\u0000\u02bb\u02bc\u0001"+ + "\u0000\u0000\u0000\u02bc\u02bd\u0006\u001c\f\u0000\u02bdI\u0001\u0000"+ + "\u0000\u0000\u02be\u02bf\u0007\u0019\u0000\u0000\u02bfK\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0007\u001a\u0000\u0000\u02c1M\u0001\u0000\u0000\u0000"+ + "\u02c2\u02c3\u0005\\\u0000\u0000\u02c3\u02c4\u0007\u001b\u0000\u0000\u02c4"+ + "O\u0001\u0000\u0000\u0000\u02c5\u02c6\b\u001c\u0000\u0000\u02c6Q\u0001"+ + 
"\u0000\u0000\u0000\u02c7\u02c9\u0007\u0003\u0000\u0000\u02c8\u02ca\u0007"+ + "\u001d\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0003"+ + "J\u001d\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000"+ + "\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000"+ + "\u0000\u0000\u02cfS\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005@\u0000\u0000"+ + "\u02d1U\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005`\u0000\u0000\u02d3W"+ + "\u0001\u0000\u0000\u0000\u02d4\u02d8\b\u001e\u0000\u0000\u02d5\u02d6\u0005"+ + "`\u0000\u0000\u02d6\u02d8\u0005`\u0000\u0000\u02d7\u02d4\u0001\u0000\u0000"+ + "\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d8Y\u0001\u0000\u0000\u0000"+ + "\u02d9\u02da\u0005_\u0000\u0000\u02da[\u0001\u0000\u0000\u0000\u02db\u02df"+ + "\u0003L\u001e\u0000\u02dc\u02df\u0003J\u001d\u0000\u02dd\u02df\u0003Z"+ + "%\u0000\u02de\u02db\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000"+ + "\u0000\u02de\u02dd\u0001\u0000\u0000\u0000\u02df]\u0001\u0000\u0000\u0000"+ + "\u02e0\u02e5\u0005\"\u0000\u0000\u02e1\u02e4\u0003N\u001f\u0000\u02e2"+ + "\u02e4\u0003P \u0000\u02e3\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e2\u0001"+ + "\u0000\u0000\u0000\u02e4\u02e7\u0001\u0000\u0000\u0000\u02e5\u02e3\u0001"+ + "\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000\u0000\u02e6\u02e8\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e8\u02fe\u0005"+ + "\"\u0000\u0000\u02e9\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000"+ + "\u0000\u02eb\u02ec\u0005\"\u0000\u0000\u02ec\u02f0\u0001\u0000\u0000\u0000"+ + "\u02ed\u02ef\b\u0017\u0000\u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef"+ + "\u02f2\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f0"+ + "\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f3\u0001\u0000\u0000\u0000\u02f2"+ + "\u02f0\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5"+ + "\u0005\"\u0000\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02f8\u0001"+ + "\u0000\u0000\u0000\u02f7\u02f9\u0005\"\u0000\u0000\u02f8\u02f7\u0001\u0000"+ + "\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fa\u02fc\u0005\"\u0000\u0000\u02fb\u02fa\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001\u0000\u0000"+ + "\u0000\u02fd\u02e0\u0001\u0000\u0000\u0000\u02fd\u02e9\u0001\u0000\u0000"+ + "\u0000\u02fe_\u0001\u0000\u0000\u0000\u02ff\u0301\u0003J\u001d\u0000\u0300"+ + "\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ + "\u0300\u0001\u0000\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303"+ + "a\u0001\u0000\u0000\u0000\u0304\u0306\u0003J\u001d\u0000\u0305\u0304\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+ + "\u0000\u0000\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u0309\u0001"+ + "\u0000\u0000\u0000\u0309\u030d\u0003t2\u0000\u030a\u030c\u0003J\u001d"+ + "\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000"+ + "\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000"+ + "\u0000\u030e\u032f\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000"+ + "\u0000\u0310\u0312\u0003t2\u0000\u0311\u0313\u0003J\u001d\u0000\u0312"+ + "\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314"+ + "\u0312\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000\u0315"+ + "\u032f\u0001\u0000\u0000\u0000\u0316\u0318\u0003J\u001d\u0000\u0317\u0316"+ + 
"\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319\u0317"+ + "\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u0322"+ + "\u0001\u0000\u0000\u0000\u031b\u031f\u0003t2\u0000\u031c\u031e\u0003J"+ + "\u001d\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u0321\u0001\u0000"+ + "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ + "\u0000\u0000\u0320\u0323\u0001\u0000\u0000\u0000\u0321\u031f\u0001\u0000"+ + "\u0000\u0000\u0322\u031b\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000"+ + "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0325\u0003R!\u0000"+ + "\u0325\u032f\u0001\u0000\u0000\u0000\u0326\u0328\u0003t2\u0000\u0327\u0329"+ + "\u0003J\u001d\u0000\u0328\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001"+ + "\u0000\u0000\u0000\u032a\u0328\u0001\u0000\u0000\u0000\u032a\u032b\u0001"+ + "\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "R!\u0000\u032d\u032f\u0001\u0000\u0000\u0000\u032e\u0305\u0001\u0000\u0000"+ + "\u0000\u032e\u0310\u0001\u0000\u0000\u0000\u032e\u0317\u0001\u0000\u0000"+ + "\u0000\u032e\u0326\u0001\u0000\u0000\u0000\u032fc\u0001\u0000\u0000\u0000"+ + "\u0330\u0331\u0007\u001f\u0000\u0000\u0331\u0332\u0007 \u0000\u0000\u0332"+ + "e\u0001\u0000\u0000\u0000\u0333\u0334\u0007\f\u0000\u0000\u0334\u0335"+ + "\u0007\t\u0000\u0000\u0335\u0336\u0007\u0000\u0000\u0000\u0336g\u0001"+ + "\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\u0002"+ + "\u0000\u0000\u0339\u033a\u0007\u0004\u0000\u0000\u033ai\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005=\u0000\u0000\u033ck\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0005:\u0000\u0000\u033e\u033f\u0005:\u0000\u0000\u033fm\u0001"+ + "\u0000\u0000\u0000\u0340\u0341\u0005:\u0000\u0000\u0341o\u0001\u0000\u0000"+ + "\u0000\u0342\u0343\u0005,\u0000\u0000\u0343q\u0001\u0000\u0000\u0000\u0344"+ + "\u0345\u0007\u0000\u0000\u0000\u0345\u0346\u0007\u0003\u0000\u0000\u0346"+ + "\u0347\u0007\u0002\u0000\u0000\u0347\u0348\u0007\u0004\u0000\u0000\u0348"+ + "s\u0001\u0000\u0000\u0000\u0349\u034a\u0005.\u0000\u0000\u034au\u0001"+ + "\u0000\u0000\u0000\u034b\u034c\u0007\u000f\u0000\u0000\u034c\u034d\u0007"+ + "\f\u0000\u0000\u034d\u034e\u0007\r\u0000\u0000\u034e\u034f\u0007\u0002"+ + "\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350w\u0001\u0000\u0000"+ + "\u0000\u0351\u0352\u0007\u000f\u0000\u0000\u0352\u0353\u0007\u0001\u0000"+ + "\u0000\u0353\u0354\u0007\u0006\u0000\u0000\u0354\u0355\u0007\u0002\u0000"+ + "\u0000\u0355\u0356\u0007\u0005\u0000\u0000\u0356y\u0001\u0000\u0000\u0000"+ + "\u0357\u0358\u0007\u0001\u0000\u0000\u0358\u0359\u0007\t\u0000\u0000\u0359"+ + "{\u0001\u0000\u0000\u0000\u035a\u035b\u0007\u0001\u0000\u0000\u035b\u035c"+ + "\u0007\u0002\u0000\u0000\u035c}\u0001\u0000\u0000\u0000\u035d\u035e\u0007"+ + "\r\u0000\u0000\u035e\u035f\u0007\f\u0000\u0000\u035f\u0360\u0007\u0002"+ + "\u0000\u0000\u0360\u0361\u0007\u0005\u0000\u0000\u0361\u007f\u0001\u0000"+ + "\u0000\u0000\u0362\u0363\u0007\r\u0000\u0000\u0363\u0364\u0007\u0001\u0000"+ + "\u0000\u0364\u0365\u0007\u0012\u0000\u0000\u0365\u0366\u0007\u0003\u0000"+ + "\u0000\u0366\u0081\u0001\u0000\u0000\u0000\u0367\u0368\u0005(\u0000\u0000"+ + "\u0368\u0083\u0001\u0000\u0000\u0000\u0369\u036a\u0007\t\u0000\u0000\u036a"+ + "\u036b\u0007\u0007\u0000\u0000\u036b\u036c\u0007\u0005\u0000\u0000\u036c"+ + "\u0085\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e\u036f"+ + "\u0007\u0014\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0371\u0007"+ + 
"\r\u0000\u0000\u0371\u0087\u0001\u0000\u0000\u0000\u0372\u0373\u0007\t"+ + "\u0000\u0000\u0373\u0374\u0007\u0014\u0000\u0000\u0374\u0375\u0007\r\u0000"+ + "\u0000\u0375\u0376\u0007\r\u0000\u0000\u0376\u0377\u0007\u0002\u0000\u0000"+ + "\u0377\u0089\u0001\u0000\u0000\u0000\u0378\u0379\u0007\u0007\u0000\u0000"+ + "\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u008b\u0001\u0000\u0000\u0000"+ + "\u037b\u037c\u0005?\u0000\u0000\u037c\u008d\u0001\u0000\u0000\u0000\u037d"+ + "\u037e\u0007\u0006\u0000\u0000\u037e\u037f\u0007\r\u0000\u0000\u037f\u0380"+ + "\u0007\u0001\u0000\u0000\u0380\u0381\u0007\u0012\u0000\u0000\u0381\u0382"+ + "\u0007\u0003\u0000\u0000\u0382\u008f\u0001\u0000\u0000\u0000\u0383\u0384"+ + "\u0005)\u0000\u0000\u0384\u0091\u0001\u0000\u0000\u0000\u0385\u0386\u0007"+ + "\u0005\u0000\u0000\u0386\u0387\u0007\u0006\u0000\u0000\u0387\u0388\u0007"+ + "\u0014\u0000\u0000\u0388\u0389\u0007\u0003\u0000\u0000\u0389\u0093\u0001"+ + "\u0000\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u038c\u0005=\u0000"+ + "\u0000\u038c\u0095\u0001\u0000\u0000\u0000\u038d\u038e\u0005=\u0000\u0000"+ + "\u038e\u038f\u0005~\u0000\u0000\u038f\u0097\u0001\u0000\u0000\u0000\u0390"+ + "\u0391\u0005!\u0000\u0000\u0391\u0392\u0005=\u0000\u0000\u0392\u0099\u0001"+ + "\u0000\u0000\u0000\u0393\u0394\u0005<\u0000\u0000\u0394\u009b\u0001\u0000"+ + "\u0000\u0000\u0395\u0396\u0005<\u0000\u0000\u0396\u0397\u0005=\u0000\u0000"+ + "\u0397\u009d\u0001\u0000\u0000\u0000\u0398\u0399\u0005>\u0000\u0000\u0399"+ + "\u009f\u0001\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u039c"+ + "\u0005=\u0000\u0000\u039c\u00a1\u0001\u0000\u0000\u0000\u039d\u039e\u0005"+ + "+\u0000\u0000\u039e\u00a3\u0001\u0000\u0000\u0000\u039f\u03a0\u0005-\u0000"+ + "\u0000\u03a0\u00a5\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005*\u0000\u0000"+ + "\u03a2\u00a7\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005/\u0000\u0000\u03a4"+ + "\u00a9\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005%\u0000\u0000\u03a6\u00ab"+ + "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003.\u000f\u0000\u03a8\u03a9\u0001"+ + "\u0000\u0000\u0000\u03a9\u03aa\u0006N\r\u0000\u03aa\u00ad\u0001\u0000"+ + "\u0000\u0000\u03ab\u03ae\u0003\u008c>\u0000\u03ac\u03af\u0003L\u001e\u0000"+ + "\u03ad\u03af\u0003Z%\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03ad"+ + "\u0001\u0000\u0000\u0000\u03af\u03b3\u0001\u0000\u0000\u0000\u03b0\u03b2"+ + "\u0003\\&\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b5\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b1\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001"+ + "\u0000\u0000\u0000\u03b4\u03bd\u0001\u0000\u0000\u0000\u03b5\u03b3\u0001"+ + "\u0000\u0000\u0000\u03b6\u03b8\u0003\u008c>\u0000\u03b7\u03b9\u0003J\u001d"+ + "\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03ba\u0001\u0000\u0000"+ + "\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bd\u0001\u0000\u0000\u0000\u03bc\u03ab\u0001\u0000\u0000"+ + "\u0000\u03bc\u03b6\u0001\u0000\u0000\u0000\u03bd\u00af\u0001\u0000\u0000"+ + "\u0000\u03be\u03bf\u0005[\u0000\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0006P\u0000\u0000\u03c1\u03c2\u0006P\u0000\u0000\u03c2\u00b1"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0005]\u0000\u0000\u03c4\u03c5\u0001"+ + "\u0000\u0000\u0000\u03c5\u03c6\u0006Q\f\u0000\u03c6\u03c7\u0006Q\f\u0000"+ + "\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03cc\u0003L\u001e\u0000\u03c9"+ + "\u03cb\u0003\\&\u0000\u03ca\u03c9\u0001\u0000\u0000\u0000\u03cb\u03ce"+ + "\u0001\u0000\u0000\u0000\u03cc\u03ca\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + 
"\u0001\u0000\u0000\u0000\u03cd\u03d9\u0001\u0000\u0000\u0000\u03ce\u03cc"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d2\u0003Z%\u0000\u03d0\u03d2\u0003T"+ + "\"\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d1\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d2\u03d4\u0001\u0000\u0000\u0000\u03d3\u03d5\u0003\\&\u0000"+ + "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d7\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d1\u0001\u0000\u0000\u0000\u03d9\u00b5\u0001\u0000\u0000\u0000"+ + "\u03da\u03dc\u0003V#\u0000\u03db\u03dd\u0003X$\u0000\u03dc\u03db\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03dc\u0001"+ + "\u0000\u0000\u0000\u03de\u03df\u0001\u0000\u0000\u0000\u03df\u03e0\u0001"+ + "\u0000\u0000\u0000\u03e0\u03e1\u0003V#\u0000\u03e1\u00b7\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0003\u00b6S\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000"+ + "\u03e4\u03e5\u0003B\u0019\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e7\u0006U\u000b\u0000\u03e7\u00bb\u0001\u0000\u0000\u0000\u03e8\u03e9"+ + "\u0003D\u001a\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006"+ + "V\u000b\u0000\u03eb\u00bd\u0001\u0000\u0000\u0000\u03ec\u03ed\u0003F\u001b"+ + "\u0000\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006W\u000b\u0000"+ + "\u03ef\u00bf\u0001\u0000\u0000\u0000\u03f0\u03f1\u0003\u00b0P\u0000\u03f1"+ + "\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006X\u000e\u0000\u03f3\u03f4"+ + "\u0006X\u000f\u0000\u03f4\u00c1\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ + "H\u001c\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006Y\u0010"+ + "\u0000\u03f8\u03f9\u0006Y\f\u0000\u03f9\u00c3\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u0003F\u001b\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0006Z\u000b\u0000\u03fd\u00c5\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0003B\u0019\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ + "[\u000b\u0000\u0401\u00c7\u0001\u0000\u0000\u0000\u0402\u0403\u0003D\u001a"+ + "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006\\\u000b\u0000"+ + "\u0405\u00c9\u0001\u0000\u0000\u0000\u0406\u0407\u0003H\u001c\u0000\u0407"+ + "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006]\u0010\u0000\u0409\u040a"+ + "\u0006]\f\u0000\u040a\u00cb\u0001\u0000\u0000\u0000\u040b\u040c\u0003"+ + "\u00b0P\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006^\u000e"+ + "\u0000\u040e\u00cd\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00b2Q\u0000"+ + "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006_\u0011\u0000\u0412"+ + "\u00cf\u0001\u0000\u0000\u0000\u0413\u0414\u0003n/\u0000\u0414\u0415\u0001"+ + "\u0000\u0000\u0000\u0415\u0416\u0006`\u0012\u0000\u0416\u00d1\u0001\u0000"+ + "\u0000\u0000\u0417\u0418\u0003p0\u0000\u0418\u0419\u0001\u0000\u0000\u0000"+ + "\u0419\u041a\u0006a\u0013\u0000\u041a\u00d3\u0001\u0000\u0000\u0000\u041b"+ + "\u041c\u0003j-\u0000\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006"+ + "b\u0014\u0000\u041e\u00d5\u0001\u0000\u0000\u0000\u041f\u0420\u0007\u0010"+ + "\u0000\u0000\u0420\u0421\u0007\u0003\u0000\u0000\u0421\u0422\u0007\u0005"+ + "\u0000\u0000\u0422\u0423\u0007\f\u0000\u0000\u0423\u0424\u0007\u0000\u0000"+ + "\u0000\u0424\u0425\u0007\f\u0000\u0000\u0425\u0426\u0007\u0005\u0000\u0000"+ + "\u0426\u0427\u0007\f\u0000\u0000\u0427\u00d7\u0001\u0000\u0000\u0000\u0428"+ + "\u042c\b!\u0000\u0000\u0429\u042a\u0005/\u0000\u0000\u042a\u042c\b\"\u0000"+ + 
"\u0000\u042b\u0428\u0001\u0000\u0000\u0000\u042b\u0429\u0001\u0000\u0000"+ + "\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042f\u0003\u00d8d\u0000"+ + "\u042e\u042d\u0001\u0000\u0000\u0000\u042f\u0430\u0001\u0000\u0000\u0000"+ + "\u0430\u042e\u0001\u0000\u0000\u0000\u0430\u0431\u0001\u0000\u0000\u0000"+ + "\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0433\u0003\u00dae\u0000\u0433"+ + "\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006f\u0015\u0000\u0435\u00dd"+ + "\u0001\u0000\u0000\u0000\u0436\u0437\u0003^\'\u0000\u0437\u0438\u0001"+ + "\u0000\u0000\u0000\u0438\u0439\u0006g\u0016\u0000\u0439\u00df\u0001\u0000"+ + "\u0000\u0000\u043a\u043b\u0003B\u0019\u0000\u043b\u043c\u0001\u0000\u0000"+ + "\u0000\u043c\u043d\u0006h\u000b\u0000\u043d\u00e1\u0001\u0000\u0000\u0000"+ + "\u043e\u043f\u0003D\u001a\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ + "\u0441\u0006i\u000b\u0000\u0441\u00e3\u0001\u0000\u0000\u0000\u0442\u0443"+ + "\u0003F\u001b\u0000\u0443\u0444\u0001\u0000\u0000\u0000\u0444\u0445\u0006"+ + "j\u000b\u0000\u0445\u00e5\u0001\u0000\u0000\u0000\u0446\u0447\u0003H\u001c"+ + "\u0000\u0447\u0448\u0001\u0000\u0000\u0000\u0448\u0449\u0006k\u0010\u0000"+ + "\u0449\u044a\u0006k\f\u0000\u044a\u00e7\u0001\u0000\u0000\u0000\u044b"+ + "\u044c\u0003t2\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006"+ + "l\u0017\u0000\u044e\u00e9\u0001\u0000\u0000\u0000\u044f\u0450\u0003p0"+ + "\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006m\u0013\u0000"+ + "\u0452\u00eb\u0001\u0000\u0000\u0000\u0453\u0454\u0004n\b\u0000\u0454"+ + "\u0455\u0003\u008c>\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457"+ + "\u0006n\u0018\u0000\u0457\u00ed\u0001\u0000\u0000\u0000\u0458\u0459\u0004"+ + "o\t\u0000\u0459\u045a\u0003\u00aeO\u0000\u045a\u045b\u0001\u0000\u0000"+ + "\u0000\u045b\u045c\u0006o\u0019\u0000\u045c\u00ef\u0001\u0000\u0000\u0000"+ + "\u045d\u0462\u0003L\u001e\u0000\u045e\u0462\u0003J\u001d\u0000\u045f\u0462"+ + "\u0003Z%\u0000\u0460\u0462\u0003\u00a6K\u0000\u0461\u045d\u0001\u0000"+ + "\u0000\u0000\u0461\u045e\u0001\u0000\u0000\u0000\u0461\u045f\u0001\u0000"+ + "\u0000\u0000\u0461\u0460\u0001\u0000\u0000\u0000\u0462\u00f1\u0001\u0000"+ + "\u0000\u0000\u0463\u0466\u0003L\u001e\u0000\u0464\u0466\u0003\u00a6K\u0000"+ + "\u0465\u0463\u0001\u0000\u0000\u0000\u0465\u0464\u0001\u0000\u0000\u0000"+ + "\u0466\u046a\u0001\u0000\u0000\u0000\u0467\u0469\u0003\u00f0p\u0000\u0468"+ + "\u0467\u0001\u0000\u0000\u0000\u0469\u046c\u0001\u0000\u0000\u0000\u046a"+ + "\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b"+ + "\u0477\u0001\u0000\u0000\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046d"+ + "\u0470\u0003Z%\u0000\u046e\u0470\u0003T\"\u0000\u046f\u046d\u0001\u0000"+ + "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0472\u0001\u0000"+ + "\u0000\u0000\u0471\u0473\u0003\u00f0p\u0000\u0472\u0471\u0001\u0000\u0000"+ + "\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0472\u0001\u0000\u0000"+ + "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0477\u0001\u0000\u0000"+ + "\u0000\u0476\u0465\u0001\u0000\u0000\u0000\u0476\u046f\u0001\u0000\u0000"+ + "\u0000\u0477\u00f3\u0001\u0000\u0000\u0000\u0478\u047b\u0003\u00f2q\u0000"+ + "\u0479\u047b\u0003\u00b6S\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047a"+ + "\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c"+ + "\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d"+ + "\u00f5\u0001\u0000\u0000\u0000\u047e\u047f\u0003B\u0019\u0000\u047f\u0480"+ + 
"\u0001\u0000\u0000\u0000\u0480\u0481\u0006s\u000b\u0000\u0481\u00f7\u0001"+ + "\u0000\u0000\u0000\u0482\u0483\u0003D\u001a\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0006t\u000b\u0000\u0485\u00f9\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0003F\u001b\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0006u\u000b\u0000\u0489\u00fb\u0001\u0000\u0000\u0000\u048a"+ + "\u048b\u0003H\u001c\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d"+ + "\u0006v\u0010\u0000\u048d\u048e\u0006v\f\u0000\u048e\u00fd\u0001\u0000"+ + "\u0000\u0000\u048f\u0490\u0003j-\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + "\u0491\u0492\u0006w\u0014\u0000\u0492\u00ff\u0001\u0000\u0000\u0000\u0493"+ + "\u0494\u0003p0\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ + "x\u0013\u0000\u0496\u0101\u0001\u0000\u0000\u0000\u0497\u0498\u0003t2"+ + "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006y\u0017\u0000"+ + "\u049a\u0103\u0001\u0000\u0000\u0000\u049b\u049c\u0004z\n\u0000\u049c"+ + "\u049d\u0003\u008c>\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f"+ + "\u0006z\u0018\u0000\u049f\u0105\u0001\u0000\u0000\u0000\u04a0\u04a1\u0004"+ + "{\u000b\u0000\u04a1\u04a2\u0003\u00aeO\u0000\u04a2\u04a3\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0006{\u0019\u0000\u04a4\u0107\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0007\f\u0000\u0000\u04a6\u04a7\u0007\u0002\u0000\u0000\u04a7"+ + "\u0109\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003\u00f4r\u0000\u04a9\u04aa"+ + "\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006}\u001a\u0000\u04ab\u010b\u0001"+ + "\u0000\u0000\u0000\u04ac\u04ad\u0003B\u0019\u0000\u04ad\u04ae\u0001\u0000"+ + "\u0000\u0000\u04ae\u04af\u0006~\u000b\u0000\u04af\u010d\u0001\u0000\u0000"+ + "\u0000\u04b0\u04b1\u0003D\u001a\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000"+ + "\u04b2\u04b3\u0006\u007f\u000b\u0000\u04b3\u010f\u0001\u0000\u0000\u0000"+ + "\u04b4\u04b5\u0003F\u001b\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0006\u0080\u000b\u0000\u04b7\u0111\u0001\u0000\u0000\u0000\u04b8"+ + "\u04b9\u0003H\u001c\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + "\u0006\u0081\u0010\u0000\u04bb\u04bc\u0006\u0081\f\u0000\u04bc\u0113\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0003\u00b0P\u0000\u04be\u04bf\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0006\u0082\u000e\u0000\u04c0\u04c1\u0006\u0082"+ + "\u001b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000\u04c2\u04c3\u0007\u0007"+ + "\u0000\u0000\u04c3\u04c4\u0007\t\u0000\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u0083\u001c\u0000\u04c6\u0117\u0001\u0000\u0000"+ + "\u0000\u04c7\u04c8\u0007\u0013\u0000\u0000\u04c8\u04c9\u0007\u0001\u0000"+ + "\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000\u04ca\u04cb\u0007\n\u0000\u0000"+ + "\u04cb\u04cc\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000"+ + "\u04cd\u0119\u0001\u0000\u0000\u0000\u04ce\u04cf\b#\u0000\u0000\u04cf"+ + "\u011b\u0001\u0000\u0000\u0000\u04d0\u04d2\u0003\u011a\u0085\u0000\u04d1"+ + "\u04d0\u0001\u0000\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3"+ + "\u04d1\u0001\u0000\u0000\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ + "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003n/\u0000\u04d6\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d7\u04d1\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d8\u04da\u0001\u0000\u0000\u0000\u04d9\u04db\u0003"+ + "\u011a\u0085\u0000\u04da\u04d9\u0001\u0000\u0000\u0000\u04db\u04dc\u0001"+ + "\u0000\u0000\u0000\u04dc\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001"+ + 
"\u0000\u0000\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\u0003"+ + "\u011c\u0086\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1\u0006"+ + "\u0087\u001d\u0000\u04e1\u011f\u0001\u0000\u0000\u0000\u04e2\u04e3\u0003"+ + "B\u0019\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006\u0088"+ + "\u000b\u0000\u04e5\u0121\u0001\u0000\u0000\u0000\u04e6\u04e7\u0003D\u001a"+ + "\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8\u04e9\u0006\u0089\u000b"+ + "\u0000\u04e9\u0123\u0001\u0000\u0000\u0000\u04ea\u04eb\u0003F\u001b\u0000"+ + "\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed\u0006\u008a\u000b\u0000"+ + "\u04ed\u0125\u0001\u0000\u0000\u0000\u04ee\u04ef\u0003H\u001c\u0000\u04ef"+ + "\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006\u008b\u0010\u0000\u04f1"+ + "\u04f2\u0006\u008b\f\u0000\u04f2\u04f3\u0006\u008b\f\u0000\u04f3\u0127"+ + "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003j-\u0000\u04f5\u04f6\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0006\u008c\u0014\u0000\u04f7\u0129\u0001\u0000"+ + "\u0000\u0000\u04f8\u04f9\u0003p0\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ + "\u04fa\u04fb\u0006\u008d\u0013\u0000\u04fb\u012b\u0001\u0000\u0000\u0000"+ + "\u04fc\u04fd\u0003t2\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff"+ + "\u0006\u008e\u0017\u0000\u04ff\u012d\u0001\u0000\u0000\u0000\u0500\u0501"+ + "\u0003\u0118\u0084\u0000\u0501\u0502\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u0006\u008f\u001e\u0000\u0503\u012f\u0001\u0000\u0000\u0000\u0504\u0505"+ + "\u0003\u00f4r\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507\u0006"+ + "\u0090\u001a\u0000\u0507\u0131\u0001\u0000\u0000\u0000\u0508\u0509\u0003"+ + "\u00b8T\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006\u0091"+ + "\u001f\u0000\u050b\u0133\u0001\u0000\u0000\u0000\u050c\u050d\u0004\u0092"+ + "\f\u0000\u050d\u050e\u0003\u008c>\u0000\u050e\u050f\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0006\u0092\u0018\u0000\u0510\u0135\u0001\u0000\u0000"+ + "\u0000\u0511\u0512\u0004\u0093\r\u0000\u0512\u0513\u0003\u00aeO\u0000"+ + "\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u0093\u0019\u0000"+ + "\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0003B\u0019\u0000\u0517"+ + "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0094\u000b\u0000\u0519"+ + "\u0139\u0001\u0000\u0000\u0000\u051a\u051b\u0003D\u001a\u0000\u051b\u051c"+ + "\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u0095\u000b\u0000\u051d\u013b"+ + "\u0001\u0000\u0000\u0000\u051e\u051f\u0003F\u001b\u0000\u051f\u0520\u0001"+ + "\u0000\u0000\u0000\u0520\u0521\u0006\u0096\u000b\u0000\u0521\u013d\u0001"+ + "\u0000\u0000\u0000\u0522\u0523\u0003H\u001c\u0000\u0523\u0524\u0001\u0000"+ + "\u0000\u0000\u0524\u0525\u0006\u0097\u0010\u0000\u0525\u0526\u0006\u0097"+ + "\f\u0000\u0526\u013f\u0001\u0000\u0000\u0000\u0527\u0528\u0003t2\u0000"+ + "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u0098\u0017\u0000"+ + "\u052a\u0141\u0001\u0000\u0000\u0000\u052b\u052c\u0004\u0099\u000e\u0000"+ + "\u052c\u052d\u0003\u008c>\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e"+ + "\u052f\u0006\u0099\u0018\u0000\u052f\u0143\u0001\u0000\u0000\u0000\u0530"+ + "\u0531\u0004\u009a\u000f\u0000\u0531\u0532\u0003\u00aeO\u0000\u0532\u0533"+ + "\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0019\u0000\u0534\u0145"+ + "\u0001\u0000\u0000\u0000\u0535\u0536\u0003\u00b8T\u0000\u0536\u0537\u0001"+ + "\u0000\u0000\u0000\u0537\u0538\u0006\u009b\u001f\u0000\u0538\u0147\u0001"+ + "\u0000\u0000\u0000\u0539\u053a\u0003\u00b4R\u0000\u053a\u053b\u0001\u0000"+ + 
"\u0000\u0000\u053b\u053c\u0006\u009c \u0000\u053c\u0149\u0001\u0000\u0000"+ + "\u0000\u053d\u053e\u0003B\u0019\u0000\u053e\u053f\u0001\u0000\u0000\u0000"+ + "\u053f\u0540\u0006\u009d\u000b\u0000\u0540\u014b\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0003D\u001a\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543"+ + "\u0544\u0006\u009e\u000b\u0000\u0544\u014d\u0001\u0000\u0000\u0000\u0545"+ + "\u0546\u0003F\u001b\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548"+ + "\u0006\u009f\u000b\u0000\u0548\u014f\u0001\u0000\u0000\u0000\u0549\u054a"+ + "\u0003H\u001c\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006"+ + "\u00a0\u0010\u0000\u054c\u054d\u0006\u00a0\f\u0000\u054d\u0151\u0001\u0000"+ + "\u0000\u0000\u054e\u054f\u0007\u0001\u0000\u0000\u054f\u0550\u0007\t\u0000"+ + "\u0000\u0550\u0551\u0007\u000f\u0000\u0000\u0551\u0552\u0007\u0007\u0000"+ + "\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553\u0554\u0003B\u0019\u0000"+ + "\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556\u0006\u00a2\u000b\u0000"+ + "\u0556\u0155\u0001\u0000\u0000\u0000\u0557\u0558\u0003D\u001a\u0000\u0558"+ + "\u0559\u0001\u0000\u0000\u0000\u0559\u055a\u0006\u00a3\u000b\u0000\u055a"+ + "\u0157\u0001\u0000\u0000\u0000\u055b\u055c\u0003F\u001b\u0000\u055c\u055d"+ + "\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u00a4\u000b\u0000\u055e\u0159"+ + "\u0001\u0000\u0000\u0000\u055f\u0560\u0003\u00b2Q\u0000\u0560\u0561\u0001"+ + "\u0000\u0000\u0000\u0561\u0562\u0006\u00a5\u0011\u0000\u0562\u0563\u0006"+ + "\u00a5\f\u0000\u0563\u015b\u0001\u0000\u0000\u0000\u0564\u0565\u0003n"+ + "/\u0000\u0565\u0566\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00a6\u0012"+ + "\u0000\u0567\u015d\u0001\u0000\u0000\u0000\u0568\u056e\u0003T\"\u0000"+ + "\u0569\u056e\u0003J\u001d\u0000\u056a\u056e\u0003t2\u0000\u056b\u056e"+ + "\u0003L\u001e\u0000\u056c\u056e\u0003Z%\u0000\u056d\u0568\u0001\u0000"+ + "\u0000\u0000\u056d\u0569\u0001\u0000\u0000\u0000\u056d\u056a\u0001\u0000"+ + "\u0000\u0000\u056d\u056b\u0001\u0000\u0000\u0000\u056d\u056c\u0001\u0000"+ + "\u0000\u0000\u056e\u056f\u0001\u0000\u0000\u0000\u056f\u056d\u0001\u0000"+ + "\u0000\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u015f\u0001\u0000"+ + "\u0000\u0000\u0571\u0572\u0003B\u0019\u0000\u0572\u0573\u0001\u0000\u0000"+ + "\u0000\u0573\u0574\u0006\u00a8\u000b\u0000\u0574\u0161\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0003D\u001a\u0000\u0576\u0577\u0001\u0000\u0000\u0000"+ + "\u0577\u0578\u0006\u00a9\u000b\u0000\u0578\u0163\u0001\u0000\u0000\u0000"+ + "\u0579\u057a\u0003F\u001b\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ + "\u057c\u0006\u00aa\u000b\u0000\u057c\u0165\u0001\u0000\u0000\u0000\u057d"+ + "\u057e\u0003H\u001c\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580"+ + "\u0006\u00ab\u0010\u0000\u0580\u0581\u0006\u00ab\f\u0000\u0581\u0167\u0001"+ + "\u0000\u0000\u0000\u0582\u0583\u0003n/\u0000\u0583\u0584\u0001\u0000\u0000"+ + "\u0000\u0584\u0585\u0006\u00ac\u0012\u0000\u0585\u0169\u0001\u0000\u0000"+ + "\u0000\u0586\u0587\u0003p0\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ + "\u0589\u0006\u00ad\u0013\u0000\u0589\u016b\u0001\u0000\u0000\u0000\u058a"+ + "\u058b\u0003t2\u0000\u058b\u058c\u0001\u0000\u0000\u0000\u058c\u058d\u0006"+ + "\u00ae\u0017\u0000\u058d\u016d\u0001\u0000\u0000\u0000\u058e\u058f\u0003"+ + "\u0116\u0083\u0000\u058f\u0590\u0001\u0000\u0000\u0000\u0590\u0591\u0006"+ + "\u00af!\u0000\u0591\u0592\u0006\u00af\"\u0000\u0592\u016f\u0001\u0000"+ + "\u0000\u0000\u0593\u0594\u0003\u00dae\u0000\u0594\u0595\u0001\u0000\u0000"+ + 
"\u0000\u0595\u0596\u0006\u00b0\u0015\u0000\u0596\u0171\u0001\u0000\u0000"+ + "\u0000\u0597\u0598\u0003^\'\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ + "\u0599\u059a\u0006\u00b1\u0016\u0000\u059a\u0173\u0001\u0000\u0000\u0000"+ + "\u059b\u059c\u0003B\u0019\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ + "\u059e\u0006\u00b2\u000b\u0000\u059e\u0175\u0001\u0000\u0000\u0000\u059f"+ + "\u05a0\u0003D\u001a\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2"+ + "\u0006\u00b3\u000b\u0000\u05a2\u0177\u0001\u0000\u0000\u0000\u05a3\u05a4"+ + "\u0003F\u001b\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006"+ + "\u00b4\u000b\u0000\u05a6\u0179\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003"+ + "H\u001c\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b5"+ + "\u0010\u0000\u05aa\u05ab\u0006\u00b5\f\u0000\u05ab\u05ac\u0006\u00b5\f"+ + "\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003p0\u0000\u05ae"+ + "\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006\u00b6\u0013\u0000\u05b0"+ + "\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003t2\u0000\u05b2\u05b3\u0001"+ + "\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7\u0017\u0000\u05b4\u017f\u0001"+ + "\u0000\u0000\u0000\u05b5\u05b6\u0003\u00f4r\u0000\u05b6\u05b7\u0001\u0000"+ + "\u0000\u0000\u05b7\u05b8\u0006\u00b8\u001a\u0000\u05b8\u0181\u0001\u0000"+ + "\u0000\u0000\u05b9\u05ba\u0003B\u0019\u0000\u05ba\u05bb\u0001\u0000\u0000"+ + "\u0000\u05bb\u05bc\u0006\u00b9\u000b\u0000\u05bc\u0183\u0001\u0000\u0000"+ + "\u0000\u05bd\u05be\u0003D\u001a\u0000\u05be\u05bf\u0001\u0000\u0000\u0000"+ + "\u05bf\u05c0\u0006\u00ba\u000b\u0000\u05c0\u0185\u0001\u0000\u0000\u0000"+ + "\u05c1\u05c2\u0003F\u001b\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3"+ + "\u05c4\u0006\u00bb\u000b\u0000\u05c4\u0187\u0001\u0000\u0000\u0000\u05c5"+ + "\u05c6\u0003H\u001c\u0000\u05c6\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8"+ + "\u0006\u00bc\u0010\u0000\u05c8\u05c9\u0006\u00bc\f\u0000\u05c9\u0189\u0001"+ + "\u0000\u0000\u0000\u05ca\u05cb\u00036\u0013\u0000\u05cb\u05cc\u0001\u0000"+ + "\u0000\u0000\u05cc\u05cd\u0006\u00bd#\u0000\u05cd\u018b\u0001\u0000\u0000"+ + "\u0000\u05ce\u05cf\u0003\u0108|\u0000\u05cf\u05d0\u0001\u0000\u0000\u0000"+ + "\u05d0\u05d1\u0006\u00be$\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2"+ + "\u05d3\u0003\u0116\u0083\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4"+ + "\u05d5\u0006\u00bf!\u0000\u05d5\u05d6\u0006\u00bf\f\u0000\u05d6\u05d7"+ + "\u0006\u00bf\u0000\u0000\u05d7\u018f\u0001\u0000\u0000\u0000\u05d8\u05d9"+ + "\u0007\u0014\u0000\u0000\u05d9\u05da\u0007\u0002\u0000\u0000\u05da\u05db"+ + "\u0007\u0001\u0000\u0000\u05db\u05dc\u0007\t\u0000\u0000\u05dc\u05dd\u0007"+ + "\u0011\u0000\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de\u05df\u0006"+ + "\u00c0\f\u0000\u05df\u05e0\u0006\u00c0\u0000\u0000\u05e0\u0191\u0001\u0000"+ + "\u0000\u0000\u05e1\u05e2\u0003\u00b4R\u0000\u05e2\u05e3\u0001\u0000\u0000"+ + "\u0000\u05e3\u05e4\u0006\u00c1 \u0000\u05e4\u0193\u0001\u0000\u0000\u0000"+ + "\u05e5\u05e6\u0003\u00b8T\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7"+ + "\u05e8\u0006\u00c2\u001f\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9"+ + "\u05ea\u0003B\u0019\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec"+ + "\u0006\u00c3\u000b\u0000\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee"+ + "\u0003D\u001a\u0000\u05ee\u05ef\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006"+ + "\u00c4\u000b\u0000\u05f0\u0199\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003"+ + "F\u001b\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5"+ + 
"\u000b\u0000\u05f4\u019b\u0001\u0000\u0000\u0000\u05f5\u05f6\u0003H\u001c"+ + "\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7\u05f8\u0006\u00c6\u0010"+ + "\u0000\u05f8\u05f9\u0006\u00c6\f\u0000\u05f9\u019d\u0001\u0000\u0000\u0000"+ + "\u05fa\u05fb\u0003\u00dae\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc"+ + "\u05fd\u0006\u00c7\u0015\u0000\u05fd\u05fe\u0006\u00c7\f\u0000\u05fe\u05ff"+ + "\u0006\u00c7%\u0000\u05ff\u019f\u0001\u0000\u0000\u0000\u0600\u0601\u0003"+ + "^\'\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006\u00c8"+ + "\u0016\u0000\u0603\u0604\u0006\u00c8\f\u0000\u0604\u0605\u0006\u00c8%"+ + "\u0000\u0605\u01a1\u0001\u0000\u0000\u0000\u0606\u0607\u0003B\u0019\u0000"+ + "\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609\u0006\u00c9\u000b\u0000"+ + "\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003D\u001a\u0000\u060b"+ + "\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca\u000b\u0000\u060d"+ + "\u01a5\u0001\u0000\u0000\u0000\u060e\u060f\u0003F\u001b\u0000\u060f\u0610"+ + "\u0001\u0000\u0000\u0000\u0610\u0611\u0006\u00cb\u000b\u0000\u0611\u01a7"+ + "\u0001\u0000\u0000\u0000\u0612\u0613\u0003n/\u0000\u0613\u0614\u0001\u0000"+ + "\u0000\u0000\u0614\u0615\u0006\u00cc\u0012\u0000\u0615\u0616\u0006\u00cc"+ + "\f\u0000\u0616\u0617\u0006\u00cc\t\u0000\u0617\u01a9\u0001\u0000\u0000"+ + "\u0000\u0618\u0619\u0003p0\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a"+ + "\u061b\u0006\u00cd\u0013\u0000\u061b\u061c\u0006\u00cd\f\u0000\u061c\u061d"+ + "\u0006\u00cd\t\u0000\u061d\u01ab\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ + "B\u0019\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00ce"+ + "\u000b\u0000\u0621\u01ad\u0001\u0000\u0000\u0000\u0622\u0623\u0003D\u001a"+ + "\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624\u0625\u0006\u00cf\u000b"+ + "\u0000\u0625\u01af\u0001\u0000\u0000\u0000\u0626\u0627\u0003F\u001b\u0000"+ + "\u0627\u0628\u0001\u0000\u0000\u0000\u0628\u0629\u0006\u00d0\u000b\u0000"+ + "\u0629\u01b1\u0001\u0000\u0000\u0000\u062a\u062b\u0003\u00b8T\u0000\u062b"+ + "\u062c\u0001\u0000\u0000\u0000\u062c\u062d\u0006\u00d1\f\u0000\u062d\u062e"+ + "\u0006\u00d1\u0000\u0000\u062e\u062f\u0006\u00d1\u001f\u0000\u062f\u01b3"+ + "\u0001\u0000\u0000\u0000\u0630\u0631\u0003\u00b4R\u0000\u0631\u0632\u0001"+ + "\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\f\u0000\u0633\u0634\u0006\u00d2"+ + "\u0000\u0000\u0634\u0635\u0006\u00d2 \u0000\u0635\u01b5\u0001\u0000\u0000"+ + "\u0000\u0636\u0637\u0003d*\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638"+ + "\u0639\u0006\u00d3\f\u0000\u0639\u063a\u0006\u00d3\u0000\u0000\u063a\u063b"+ + "\u0006\u00d3&\u0000\u063b\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003"+ + "H\u001c\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4"+ + "\u0010\u0000\u063f\u0640\u0006\u00d4\f\u0000\u0640\u01b9\u0001\u0000\u0000"+ "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ - "\r\u000e\u000f\u0291\u029b\u029f\u02a2\u02ab\u02ad\u02b8\u02cd\u02d2\u02db"+ - "\u02e2\u02e7\u02e9\u02f4\u02fc\u02ff\u0301\u0306\u030b\u0311\u0318\u031d"+ - "\u0323\u0326\u032e\u0332\u03b5\u03ba\u03c1\u03c3\u03d3\u03d8\u03dd\u03df"+ - "\u03e5\u0432\u0437\u0468\u046c\u0471\u0476\u047b\u047d\u0481\u0483\u04da"+ - "\u04de\u04e3\u0574\u0576\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\r\u000e\u000f\u028f\u0299\u029d\u02a0\u02a9\u02ab\u02b6\u02c9\u02ce\u02d7"+ + "\u02de\u02e3\u02e5\u02f0\u02f8\u02fb\u02fd\u0302\u0307\u030d\u0314\u0319"+ + "\u031f\u0322\u032a\u032e\u03ae\u03b3\u03ba\u03bc\u03cc\u03d1\u03d6\u03d8"+ + 
"\u03de\u042b\u0430\u0461\u0465\u046a\u046f\u0474\u0476\u047a\u047c\u04d3"+ + "\u04d7\u04dc\u056d\u056f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ "\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ - "\u0001\u0000\u0004\u0000\u0000\u0007\u001d\u0000\u0007\u0010\u0000\u0007"+ - "F\u0000\u0005\u0000\u0000\u0007\u001e\u0000\u0007G\u0000\u0007\'\u0000"+ - "\u0007%\u0000\u0007Q\u0000\u0007\u001f\u0000\u0007)\u0000\u00075\u0000"+ - "\u0007E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000"+ - "\u0007^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007"+ - "\u0014\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007\"\u0000"; + "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007F\u0000\u0005\u0000"+ + "\u0000\u0007\u001d\u0000\u0007G\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ + "$\u0000\u0007Q\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ + "E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000\u0007"+ + "^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007\u0014"+ + "\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 71930451ad55c..50493f584fe4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -28,7 +28,6 @@ null null null null -':' '|' null null @@ -38,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' 
@@ -159,7 +159,6 @@ UNKNOWN_CMD
 LINE_COMMENT
 MULTILINE_COMMENT
 WS
-COLON
 PIPE
 QUOTED_STRING
 INTEGER_LITERAL
@@ -169,6 +168,7 @@ AND
 ASC
 ASSIGN
 CAST_OP
+COLON
 COMMA
 DESC
 DOT
455, 8, 34, 10, 34, 12, 34, 458, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 466, 8, 34, 10, 34, 12, 34, 469, 9, 34, 1, 34, 1, 34, 3, 34, 473, 8, 34, 1, 35, 1, 35, 3, 35, 477, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 482, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 491, 8, 38, 10, 38, 12, 38, 494, 9, 38, 1, 39, 1, 39, 3, 39, 498, 8, 39, 1, 39, 1, 39, 3, 39, 502, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 514, 8, 42, 10, 42, 12, 42, 517, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 527, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 539, 8, 47, 10, 47, 12, 47, 542, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 552, 8, 50, 1, 51, 3, 51, 555, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 560, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 582, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 588, 8, 58, 10, 58, 12, 58, 591, 9, 58, 3, 58, 593, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 598, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 611, 8, 61, 1, 62, 3, 62, 614, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 623, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 629, 8, 64, 10, 64, 12, 64, 632, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 31, 31, 81, 81, 1, 0, 72, 73, 2, 0, 36, 36, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 661, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 207, 1, 0, 0, 0, 12, 234, 1, 0, 0, 0, 14, 236, 1, 0, 0, 0, 16, 245, 1, 0, 0, 0, 18, 251, 1, 0, 0, 0, 20, 272, 1, 0, 0, 0, 22, 282, 1, 0, 0, 0, 24, 297, 1, 0, 0, 0, 26, 299, 1, 0, 0, 0, 28, 301, 1, 0, 0, 0, 30, 304, 1, 0, 0, 0, 32, 315, 1, 0, 0, 0, 34, 319, 1, 0, 0, 0, 36, 334, 1, 0, 0, 0, 38, 338, 1, 0, 0, 0, 40, 340, 1, 0, 0, 0, 42, 344, 1, 0, 0, 0, 44, 346, 1, 0, 0, 0, 46, 355, 1, 0, 0, 0, 48, 359, 1, 0, 0, 0, 50, 375, 1, 0, 0, 0, 52, 378, 1, 0, 0, 0, 54, 386, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 399, 1, 0, 0, 0, 60, 407, 1, 0, 0, 0, 62, 415, 1, 0, 0, 0, 64, 423, 1, 0, 0, 0, 66, 428, 1, 0, 0, 0, 68, 472, 1, 0, 0, 0, 70, 476, 1, 0, 0, 0, 72, 481, 1, 0, 0, 0, 74, 483, 1, 0, 0, 0, 76, 486, 1, 0, 0, 0, 78, 495, 1, 0, 0, 0, 80, 503, 1, 0, 0, 0, 82, 506, 1, 0, 0, 0, 84, 509, 1, 0, 0, 0, 86, 518, 1, 0, 0, 0, 88, 522, 1, 0, 0, 0, 90, 528, 1, 0, 0, 0, 92, 532, 1, 0, 0, 0, 94, 535, 1, 0, 0, 0, 96, 543, 1, 0, 0, 0, 98, 547, 1, 0, 0, 0, 100, 551, 1, 0, 0, 0, 102, 554, 1, 0, 0, 0, 104, 559, 1, 0, 0, 0, 106, 563, 1, 0, 0, 0, 108, 565, 1, 0, 0, 0, 110, 567, 1, 0, 0, 0, 112, 570, 1, 0, 0, 0, 114, 574, 1, 0, 0, 0, 116, 577, 1, 0, 0, 0, 118, 597, 1, 0, 0, 0, 120, 601, 1, 0, 0, 0, 122, 606, 1, 0, 0, 0, 124, 613, 1, 0, 0, 0, 126, 619, 1, 0, 0, 0, 128, 624, 1, 0, 0, 0, 130, 633, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 30, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 
1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 208, 3, 10, 5, 8, 180, 208, 3, 16, 8, 0, 181, 208, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 208, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 208, 1, 0, 0, 0, 205, 206, 4, 5, 5, 0, 206, 208, 3, 14, 7, 0, 207, 177, 1, 0, 0, 0, 207, 180, 1, 0, 0, 0, 207, 181, 1, 0, 0, 0, 207, 182, 1, 0, 0, 0, 207, 198, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 208, 217, 1, 0, 0, 0, 209, 210, 10, 5, 0, 0, 210, 211, 5, 35, 0, 0, 211, 216, 3, 10, 5, 6, 212, 213, 10, 4, 0, 0, 213, 214, 5, 52, 0, 0, 214, 216, 3, 10, 5, 5, 215, 209, 1, 0, 0, 0, 215, 212, 1, 0, 0, 0, 216, 219, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 11, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 220, 222, 3, 16, 8, 0, 221, 223, 5, 49, 0, 0, 222, 221, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 225, 5, 47, 0, 0, 225, 226, 3, 106, 53, 0, 226, 235, 1, 0, 0, 0, 227, 229, 3, 16, 8, 0, 228, 230, 5, 49, 0, 0, 229, 228, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 5, 54, 0, 0, 232, 233, 3, 106, 53, 0, 233, 235, 1, 0, 0, 0, 234, 220, 1, 0, 0, 0, 234, 227, 1, 0, 0, 0, 235, 13, 1, 0, 0, 0, 236, 237, 3, 58, 29, 0, 237, 238, 5, 29, 0, 0, 238, 239, 3, 68, 34, 0, 239, 15, 1, 0, 0, 0, 240, 246, 3, 18, 9, 0, 241, 242, 3, 18, 9, 0, 242, 243, 3, 108, 54, 0, 243, 244, 3, 18, 9, 0, 244, 246, 1, 0, 0, 0, 245, 240, 1, 0, 0, 0, 245, 241, 1, 0, 0, 0, 246, 17, 1, 0, 0, 0, 247, 248, 6, 9, -1, 0, 248, 252, 3, 20, 10, 0, 249, 250, 7, 0, 0, 0, 250, 252, 3, 18, 9, 3, 251, 247, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 261, 1, 0, 0, 0, 253, 254, 10, 2, 0, 0, 254, 255, 7, 1, 0, 0, 255, 260, 3, 18, 9, 3, 256, 257, 10, 1, 0, 0, 257, 258, 7, 0, 0, 0, 258, 260, 3, 18, 9, 2, 259, 253, 1, 0, 0, 0, 259, 256, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 19, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 265, 6, 10, -1, 
0, 265, 273, 3, 68, 34, 0, 266, 273, 3, 58, 29, 0, 267, 273, 3, 22, 11, 0, 268, 269, 5, 48, 0, 0, 269, 270, 3, 10, 5, 0, 270, 271, 5, 55, 0, 0, 271, 273, 1, 0, 0, 0, 272, 264, 1, 0, 0, 0, 272, 266, 1, 0, 0, 0, 272, 267, 1, 0, 0, 0, 272, 268, 1, 0, 0, 0, 273, 279, 1, 0, 0, 0, 274, 275, 10, 1, 0, 0, 275, 276, 5, 38, 0, 0, 276, 278, 3, 26, 13, 0, 277, 274, 1, 0, 0, 0, 278, 281, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 21, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 282, 283, 3, 24, 12, 0, 283, 293, 5, 48, 0, 0, 284, 294, 5, 66, 0, 0, 285, 290, 3, 10, 5, 0, 286, 287, 5, 39, 0, 0, 287, 289, 3, 10, 5, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 284, 1, 0, 0, 0, 293, 285, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 5, 55, 0, 0, 296, 23, 1, 0, 0, 0, 297, 298, 3, 72, 36, 0, 298, 25, 1, 0, 0, 0, 299, 300, 3, 64, 32, 0, 300, 27, 1, 0, 0, 0, 301, 302, 5, 12, 0, 0, 302, 303, 3, 30, 15, 0, 303, 29, 1, 0, 0, 0, 304, 309, 3, 32, 16, 0, 305, 306, 5, 39, 0, 0, 306, 308, 3, 32, 16, 0, 307, 305, 1, 0, 0, 0, 308, 311, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 31, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 312, 313, 3, 58, 29, 0, 313, 314, 5, 37, 0, 0, 314, 316, 1, 0, 0, 0, 315, 312, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, 3, 10, 5, 0, 318, 33, 1, 0, 0, 0, 319, 320, 5, 6, 0, 0, 320, 325, 3, 36, 18, 0, 321, 322, 5, 39, 0, 0, 322, 324, 3, 36, 18, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 330, 3, 42, 21, 0, 329, 328, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 35, 1, 0, 0, 0, 331, 332, 3, 38, 19, 0, 332, 333, 5, 29, 0, 0, 333, 335, 1, 0, 0, 0, 334, 331, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 337, 3, 40, 20, 0, 337, 37, 1, 0, 0, 0, 338, 339, 5, 81, 0, 0, 339, 39, 1, 0, 0, 0, 340, 341, 7, 2, 0, 0, 341, 41, 1, 0, 0, 0, 342, 345, 3, 44, 22, 0, 343, 345, 3, 46, 23, 0, 344, 342, 1, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 43, 1, 0, 0, 0, 346, 347, 5, 80, 0, 0, 347, 352, 5, 81, 0, 0, 348, 349, 5, 39, 0, 0, 349, 351, 5, 81, 0, 0, 350, 348, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 45, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 355, 356, 5, 70, 0, 0, 356, 357, 3, 44, 22, 0, 357, 358, 5, 71, 0, 0, 358, 47, 1, 0, 0, 0, 359, 360, 5, 19, 0, 0, 360, 365, 3, 36, 18, 0, 361, 362, 5, 39, 0, 0, 362, 364, 3, 36, 18, 0, 363, 361, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 368, 370, 3, 54, 27, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 34, 0, 0, 372, 374, 3, 30, 15, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 49, 1, 0, 0, 0, 375, 376, 5, 4, 0, 0, 376, 377, 3, 30, 15, 0, 377, 51, 1, 0, 0, 0, 378, 380, 5, 15, 0, 0, 379, 381, 3, 54, 27, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 30, 15, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 53, 1, 0, 0, 0, 386, 391, 3, 56, 28, 0, 387, 388, 5, 39, 0, 0, 388, 390, 3, 56, 28, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 55, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 397, 3, 32, 16, 0, 395, 396, 5, 16, 0, 0, 396, 398, 3, 10, 5, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 57, 1, 0, 0, 0, 399, 404, 3, 72, 36, 0, 400, 401, 5, 41, 0, 0, 401, 403, 3, 72, 36, 0, 
402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 59, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 66, 33, 0, 408, 409, 5, 41, 0, 0, 409, 411, 3, 66, 33, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 61, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 420, 3, 60, 30, 0, 416, 417, 5, 39, 0, 0, 417, 419, 3, 60, 30, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 63, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 7, 3, 0, 0, 424, 65, 1, 0, 0, 0, 425, 429, 5, 85, 0, 0, 426, 427, 4, 33, 11, 0, 427, 429, 3, 70, 35, 0, 428, 425, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 67, 1, 0, 0, 0, 430, 473, 5, 50, 0, 0, 431, 432, 3, 104, 52, 0, 432, 433, 5, 72, 0, 0, 433, 473, 1, 0, 0, 0, 434, 473, 3, 102, 51, 0, 435, 473, 3, 104, 52, 0, 436, 473, 3, 98, 49, 0, 437, 473, 3, 70, 35, 0, 438, 473, 3, 106, 53, 0, 439, 440, 5, 70, 0, 0, 440, 445, 3, 100, 50, 0, 441, 442, 5, 39, 0, 0, 442, 444, 3, 100, 50, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 448, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 5, 71, 0, 0, 449, 473, 1, 0, 0, 0, 450, 451, 5, 70, 0, 0, 451, 456, 3, 98, 49, 0, 452, 453, 5, 39, 0, 0, 453, 455, 3, 98, 49, 0, 454, 452, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 459, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 459, 460, 5, 71, 0, 0, 460, 473, 1, 0, 0, 0, 461, 462, 5, 70, 0, 0, 462, 467, 3, 106, 53, 0, 463, 464, 5, 39, 0, 0, 464, 466, 3, 106, 53, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 470, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 471, 5, 71, 0, 0, 471, 473, 1, 0, 0, 0, 472, 430, 1, 0, 0, 0, 472, 431, 1, 0, 0, 0, 472, 434, 1, 0, 0, 0, 472, 435, 1, 0, 0, 0, 472, 436, 1, 0, 0, 0, 472, 437, 1, 0, 0, 0, 472, 438, 1, 0, 0, 0, 472, 439, 1, 0, 0, 0, 472, 450, 1, 0, 0, 0, 472, 461, 1, 0, 0, 0, 473, 69, 1, 0, 0, 0, 474, 477, 5, 53, 0, 0, 475, 477, 5, 69, 0, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 71, 1, 0, 0, 0, 478, 482, 3, 64, 32, 0, 479, 480, 4, 36, 12, 0, 480, 482, 3, 70, 35, 0, 481, 478, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 482, 73, 1, 0, 0, 0, 483, 484, 5, 9, 0, 0, 484, 485, 5, 32, 0, 0, 485, 75, 1, 0, 0, 0, 486, 487, 5, 14, 0, 0, 487, 492, 3, 78, 39, 0, 488, 489, 5, 39, 0, 0, 489, 491, 3, 78, 39, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 77, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 497, 3, 10, 5, 0, 496, 498, 7, 4, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 500, 5, 51, 0, 0, 500, 502, 7, 5, 0, 0, 501, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 79, 1, 0, 0, 0, 503, 504, 5, 8, 0, 0, 504, 505, 3, 62, 31, 0, 505, 81, 1, 0, 0, 0, 506, 507, 5, 2, 0, 0, 507, 508, 3, 62, 31, 0, 508, 83, 1, 0, 0, 0, 509, 510, 5, 11, 0, 0, 510, 515, 3, 86, 43, 0, 511, 512, 5, 39, 0, 0, 512, 514, 3, 86, 43, 0, 513, 511, 1, 0, 0, 0, 514, 517, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 85, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 518, 519, 3, 60, 30, 0, 519, 520, 5, 89, 0, 0, 520, 521, 3, 60, 30, 0, 521, 87, 1, 0, 0, 0, 522, 523, 5, 1, 0, 0, 523, 524, 3, 20, 10, 0, 524, 526, 3, 106, 53, 0, 525, 527, 3, 94, 47, 0, 526, 525, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 89, 1, 0, 0, 0, 528, 529, 5, 7, 0, 0, 529, 530, 3, 20, 10, 0, 530, 531, 3, 106, 53, 0, 531, 91, 1, 0, 0, 0, 532, 533, 5, 10, 0, 0, 533, 534, 3, 58, 29, 0, 534, 93, 1, 0, 0, 0, 535, 540, 3, 96, 48, 0, 536, 537, 
5, 39, 0, 0, 537, 539, 3, 96, 48, 0, 538, 536, 1, 0, 0, 0, 539, 542, 1, 0, 0, 0, 540, 538, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 95, 1, 0, 0, 0, 542, 540, 1, 0, 0, 0, 543, 544, 3, 64, 32, 0, 544, 545, 5, 37, 0, 0, 545, 546, 3, 68, 34, 0, 546, 97, 1, 0, 0, 0, 547, 548, 7, 6, 0, 0, 548, 99, 1, 0, 0, 0, 549, 552, 3, 102, 51, 0, 550, 552, 3, 104, 52, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 101, 1, 0, 0, 0, 553, 555, 7, 0, 0, 0, 554, 553, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 5, 33, 0, 0, 557, 103, 1, 0, 0, 0, 558, 560, 7, 0, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 5, 32, 0, 0, 562, 105, 1, 0, 0, 0, 563, 564, 5, 31, 0, 0, 564, 107, 1, 0, 0, 0, 565, 566, 7, 7, 0, 0, 566, 109, 1, 0, 0, 0, 567, 568, 5, 5, 0, 0, 568, 569, 3, 112, 56, 0, 569, 111, 1, 0, 0, 0, 570, 571, 5, 70, 0, 0, 571, 572, 3, 2, 1, 0, 572, 573, 5, 71, 0, 0, 573, 113, 1, 0, 0, 0, 574, 575, 5, 13, 0, 0, 575, 576, 5, 105, 0, 0, 576, 115, 1, 0, 0, 0, 577, 578, 5, 3, 0, 0, 578, 581, 5, 95, 0, 0, 579, 580, 5, 93, 0, 0, 580, 582, 3, 60, 30, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 592, 1, 0, 0, 0, 583, 584, 5, 94, 0, 0, 584, 589, 3, 118, 59, 0, 585, 586, 5, 39, 0, 0, 586, 588, 3, 118, 59, 0, 587, 585, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 583, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 117, 1, 0, 0, 0, 594, 595, 3, 60, 30, 0, 595, 596, 5, 37, 0, 0, 596, 598, 1, 0, 0, 0, 597, 594, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 3, 60, 30, 0, 600, 119, 1, 0, 0, 0, 601, 602, 5, 18, 0, 0, 602, 603, 3, 36, 18, 0, 603, 604, 5, 93, 0, 0, 604, 605, 3, 62, 31, 0, 605, 121, 1, 0, 0, 0, 606, 607, 5, 17, 0, 0, 607, 610, 3, 54, 27, 0, 608, 609, 5, 34, 0, 0, 609, 611, 3, 30, 15, 0, 610, 608, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 123, 1, 0, 0, 0, 612, 614, 7, 8, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 5, 20, 0, 0, 616, 617, 3, 126, 63, 0, 617, 618, 3, 128, 64, 0, 618, 125, 1, 0, 0, 0, 619, 622, 3, 64, 32, 0, 620, 621, 5, 89, 0, 0, 621, 623, 3, 64, 32, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 127, 1, 0, 0, 0, 624, 625, 5, 93, 0, 0, 625, 630, 3, 130, 65, 0, 626, 627, 5, 39, 0, 0, 627, 629, 3, 130, 65, 0, 628, 626, 1, 0, 0, 0, 629, 632, 1, 0, 0, 0, 630, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 129, 1, 0, 0, 0, 632, 630, 1, 0, 0, 0, 633, 634, 3, 16, 8, 0, 634, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 207, 215, 217, 222, 229, 234, 245, 251, 259, 261, 272, 279, 290, 293, 309, 315, 325, 329, 334, 344, 352, 365, 369, 373, 380, 384, 391, 397, 404, 412, 420, 428, 445, 456, 467, 472, 476, 481, 492, 497, 501, 515, 526, 540, 551, 554, 559, 581, 589, 592, 597, 610, 613, 622, 630] \ No newline at end of file +[4, 1, 128, 635, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 
2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 344, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 350, 8, 22, 10, 22, 12, 22, 353, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 363, 8, 24, 10, 24, 12, 24, 366, 9, 24, 1, 24, 3, 24, 369, 8, 24, 1, 24, 1, 24, 3, 24, 373, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 380, 8, 26, 1, 26, 1, 26, 3, 26, 384, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 389, 8, 27, 10, 27, 12, 27, 392, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 397, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 402, 8, 29, 10, 29, 12, 29, 405, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 410, 8, 30, 10, 30, 12, 30, 413, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 418, 8, 31, 10, 31, 12, 31, 421, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 428, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 454, 8, 34, 10, 34, 12, 34, 457, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 465, 8, 34, 10, 34, 12, 34, 468, 9, 34, 1, 34, 1, 34, 3, 34, 472, 8, 34, 1, 35, 1, 35, 3, 35, 476, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 481, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 490, 8, 38, 10, 38, 12, 38, 493, 9, 38, 1, 39, 1, 39, 3, 39, 497, 8, 39, 1, 39, 1, 39, 3, 39, 501, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 513, 8, 42, 10, 42, 12, 42, 516, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 526, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 538, 8, 47, 10, 47, 12, 47, 541, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 551, 8, 50, 1, 51, 3, 51, 554, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 559, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 
1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 581, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 587, 8, 58, 10, 58, 12, 58, 590, 9, 58, 3, 58, 592, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 597, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 610, 8, 61, 1, 62, 3, 62, 613, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 622, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 628, 8, 64, 10, 64, 12, 64, 631, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 660, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 343, 1, 0, 0, 0, 44, 345, 1, 0, 0, 0, 46, 354, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 377, 1, 0, 0, 0, 54, 385, 1, 0, 0, 0, 56, 393, 1, 0, 0, 0, 58, 398, 1, 0, 0, 0, 60, 406, 1, 0, 0, 0, 62, 414, 1, 0, 0, 0, 64, 422, 1, 0, 0, 0, 66, 427, 1, 0, 0, 0, 68, 471, 1, 0, 0, 0, 70, 475, 1, 0, 0, 0, 72, 480, 1, 0, 0, 0, 74, 482, 1, 0, 0, 0, 76, 485, 1, 0, 0, 0, 78, 494, 1, 0, 0, 0, 80, 502, 1, 0, 0, 0, 82, 505, 1, 0, 0, 0, 84, 508, 1, 0, 0, 0, 86, 517, 1, 0, 0, 0, 88, 521, 1, 0, 0, 0, 90, 527, 1, 0, 0, 0, 92, 531, 1, 0, 0, 0, 94, 534, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 98, 546, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 553, 1, 0, 0, 0, 104, 558, 1, 0, 0, 0, 106, 562, 1, 0, 0, 0, 108, 564, 1, 0, 0, 0, 110, 566, 1, 0, 0, 0, 112, 569, 1, 0, 0, 0, 114, 573, 1, 0, 0, 0, 116, 576, 1, 0, 0, 0, 118, 596, 1, 0, 0, 0, 120, 600, 1, 0, 0, 0, 122, 605, 1, 0, 0, 0, 124, 612, 1, 0, 0, 0, 126, 618, 1, 0, 0, 0, 128, 623, 1, 0, 0, 0, 130, 632, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 
172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 236, 3, 58, 29, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 68, 34, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 108, 54, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 68, 34, 0, 265, 272, 3, 58, 29, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 72, 36, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 64, 32, 0, 299, 
27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 58, 29, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 344, 3, 44, 22, 0, 342, 344, 3, 46, 23, 0, 343, 341, 1, 0, 0, 0, 343, 342, 1, 0, 0, 0, 344, 43, 1, 0, 0, 0, 345, 346, 5, 80, 0, 0, 346, 351, 5, 81, 0, 0, 347, 348, 5, 39, 0, 0, 348, 350, 5, 81, 0, 0, 349, 347, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 45, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 354, 355, 5, 70, 0, 0, 355, 356, 3, 44, 22, 0, 356, 357, 5, 71, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 19, 0, 0, 359, 364, 3, 36, 18, 0, 360, 361, 5, 39, 0, 0, 361, 363, 3, 36, 18, 0, 362, 360, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 367, 369, 3, 54, 27, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 33, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 49, 1, 0, 0, 0, 374, 375, 5, 4, 0, 0, 375, 376, 3, 30, 15, 0, 376, 51, 1, 0, 0, 0, 377, 379, 5, 15, 0, 0, 378, 380, 3, 54, 27, 0, 379, 378, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 382, 5, 33, 0, 0, 382, 384, 3, 30, 15, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 53, 1, 0, 0, 0, 385, 390, 3, 56, 28, 0, 386, 387, 5, 39, 0, 0, 387, 389, 3, 56, 28, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 55, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 396, 3, 32, 16, 0, 394, 395, 5, 16, 0, 0, 395, 397, 3, 10, 5, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 57, 1, 0, 0, 0, 398, 403, 3, 72, 36, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 72, 36, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 59, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 66, 33, 0, 407, 408, 5, 41, 0, 0, 408, 410, 3, 66, 33, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 61, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 419, 3, 60, 30, 0, 415, 416, 5, 39, 0, 0, 416, 418, 3, 60, 30, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 7, 3, 0, 0, 423, 65, 1, 0, 0, 0, 424, 428, 5, 85, 0, 0, 425, 426, 4, 33, 10, 0, 426, 428, 3, 70, 35, 0, 427, 424, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 428, 67, 1, 0, 0, 0, 429, 472, 5, 50, 0, 0, 430, 431, 3, 104, 52, 0, 431, 432, 5, 72, 0, 0, 432, 472, 1, 0, 0, 0, 433, 472, 3, 102, 51, 0, 434, 472, 3, 104, 52, 0, 435, 472, 3, 98, 49, 0, 436, 472, 3, 70, 35, 0, 437, 472, 3, 106, 53, 0, 438, 439, 5, 70, 0, 0, 
439, 444, 3, 100, 50, 0, 440, 441, 5, 39, 0, 0, 441, 443, 3, 100, 50, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 447, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 71, 0, 0, 448, 472, 1, 0, 0, 0, 449, 450, 5, 70, 0, 0, 450, 455, 3, 98, 49, 0, 451, 452, 5, 39, 0, 0, 452, 454, 3, 98, 49, 0, 453, 451, 1, 0, 0, 0, 454, 457, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 458, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 458, 459, 5, 71, 0, 0, 459, 472, 1, 0, 0, 0, 460, 461, 5, 70, 0, 0, 461, 466, 3, 106, 53, 0, 462, 463, 5, 39, 0, 0, 463, 465, 3, 106, 53, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 470, 5, 71, 0, 0, 470, 472, 1, 0, 0, 0, 471, 429, 1, 0, 0, 0, 471, 430, 1, 0, 0, 0, 471, 433, 1, 0, 0, 0, 471, 434, 1, 0, 0, 0, 471, 435, 1, 0, 0, 0, 471, 436, 1, 0, 0, 0, 471, 437, 1, 0, 0, 0, 471, 438, 1, 0, 0, 0, 471, 449, 1, 0, 0, 0, 471, 460, 1, 0, 0, 0, 472, 69, 1, 0, 0, 0, 473, 476, 5, 53, 0, 0, 474, 476, 5, 69, 0, 0, 475, 473, 1, 0, 0, 0, 475, 474, 1, 0, 0, 0, 476, 71, 1, 0, 0, 0, 477, 481, 3, 64, 32, 0, 478, 479, 4, 36, 11, 0, 479, 481, 3, 70, 35, 0, 480, 477, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 73, 1, 0, 0, 0, 482, 483, 5, 9, 0, 0, 483, 484, 5, 31, 0, 0, 484, 75, 1, 0, 0, 0, 485, 486, 5, 14, 0, 0, 486, 491, 3, 78, 39, 0, 487, 488, 5, 39, 0, 0, 488, 490, 3, 78, 39, 0, 489, 487, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 77, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 494, 496, 3, 10, 5, 0, 495, 497, 7, 4, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 500, 1, 0, 0, 0, 498, 499, 5, 51, 0, 0, 499, 501, 7, 5, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 79, 1, 0, 0, 0, 502, 503, 5, 8, 0, 0, 503, 504, 3, 62, 31, 0, 504, 81, 1, 0, 0, 0, 505, 506, 5, 2, 0, 0, 506, 507, 3, 62, 31, 0, 507, 83, 1, 0, 0, 0, 508, 509, 5, 11, 0, 0, 509, 514, 3, 86, 43, 0, 510, 511, 5, 39, 0, 0, 511, 513, 3, 86, 43, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 85, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 89, 0, 0, 519, 520, 3, 60, 30, 0, 520, 87, 1, 0, 0, 0, 521, 522, 5, 1, 0, 0, 522, 523, 3, 20, 10, 0, 523, 525, 3, 106, 53, 0, 524, 526, 3, 94, 47, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 89, 1, 0, 0, 0, 527, 528, 5, 7, 0, 0, 528, 529, 3, 20, 10, 0, 529, 530, 3, 106, 53, 0, 530, 91, 1, 0, 0, 0, 531, 532, 5, 10, 0, 0, 532, 533, 3, 58, 29, 0, 533, 93, 1, 0, 0, 0, 534, 539, 3, 96, 48, 0, 535, 536, 5, 39, 0, 0, 536, 538, 3, 96, 48, 0, 537, 535, 1, 0, 0, 0, 538, 541, 1, 0, 0, 0, 539, 537, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 95, 1, 0, 0, 0, 541, 539, 1, 0, 0, 0, 542, 543, 3, 64, 32, 0, 543, 544, 5, 36, 0, 0, 544, 545, 3, 68, 34, 0, 545, 97, 1, 0, 0, 0, 546, 547, 7, 6, 0, 0, 547, 99, 1, 0, 0, 0, 548, 551, 3, 102, 51, 0, 549, 551, 3, 104, 52, 0, 550, 548, 1, 0, 0, 0, 550, 549, 1, 0, 0, 0, 551, 101, 1, 0, 0, 0, 552, 554, 7, 0, 0, 0, 553, 552, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 5, 32, 0, 0, 556, 103, 1, 0, 0, 0, 557, 559, 7, 0, 0, 0, 558, 557, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 5, 31, 0, 0, 561, 105, 1, 0, 0, 0, 562, 563, 5, 30, 0, 0, 563, 107, 1, 0, 0, 0, 564, 565, 7, 7, 0, 0, 565, 109, 1, 0, 0, 0, 566, 567, 5, 5, 0, 0, 567, 568, 3, 112, 56, 0, 568, 111, 1, 0, 0, 0, 569, 570, 5, 70, 0, 0, 570, 571, 3, 2, 1, 0, 571, 572, 5, 71, 0, 0, 572, 113, 1, 0, 0, 0, 573, 574, 5, 13, 0, 
0, 574, 575, 5, 105, 0, 0, 575, 115, 1, 0, 0, 0, 576, 577, 5, 3, 0, 0, 577, 580, 5, 95, 0, 0, 578, 579, 5, 93, 0, 0, 579, 581, 3, 60, 30, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 591, 1, 0, 0, 0, 582, 583, 5, 94, 0, 0, 583, 588, 3, 118, 59, 0, 584, 585, 5, 39, 0, 0, 585, 587, 3, 118, 59, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 592, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 582, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 117, 1, 0, 0, 0, 593, 594, 3, 60, 30, 0, 594, 595, 5, 36, 0, 0, 595, 597, 1, 0, 0, 0, 596, 593, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 3, 60, 30, 0, 599, 119, 1, 0, 0, 0, 600, 601, 5, 18, 0, 0, 601, 602, 3, 36, 18, 0, 602, 603, 5, 93, 0, 0, 603, 604, 3, 62, 31, 0, 604, 121, 1, 0, 0, 0, 605, 606, 5, 17, 0, 0, 606, 609, 3, 54, 27, 0, 607, 608, 5, 33, 0, 0, 608, 610, 3, 30, 15, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 123, 1, 0, 0, 0, 611, 613, 7, 8, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 5, 20, 0, 0, 615, 616, 3, 126, 63, 0, 616, 617, 3, 128, 64, 0, 617, 125, 1, 0, 0, 0, 618, 621, 3, 64, 32, 0, 619, 620, 5, 89, 0, 0, 620, 622, 3, 64, 32, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 127, 1, 0, 0, 0, 623, 624, 5, 93, 0, 0, 624, 629, 3, 130, 65, 0, 625, 626, 5, 39, 0, 0, 626, 628, 3, 130, 65, 0, 627, 625, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 129, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 632, 633, 3, 16, 8, 0, 633, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 343, 351, 364, 368, 372, 379, 383, 390, 396, 403, 411, 419, 427, 444, 455, 466, 471, 475, 480, 491, 496, 500, 514, 525, 539, 550, 553, 558, 580, 588, 591, 596, 609, 612, 621, 629] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 3bf6795c4e1dc..e864eaff3edd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -29,9 +29,9 @@ public class EsqlBaseParser extends ParserConfig { LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29, - PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34, - AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, @@ -99,8 +99,8 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, 
null, null, null, - null, null, null, null, null, "':'", "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, @@ -118,28 +118,28 @@ private static String[] makeSymbolicNames() { "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", - "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", - "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", - "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", - "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", - "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", - "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", - "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + 
"ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -907,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(207); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -1012,14 +1012,12 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; setState(205); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(206); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(217); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1027,7 +1025,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(215); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1035,11 +1033,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(209); + setState(208); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(210); + setState(209); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(211); + setState(210); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1048,18 +1046,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(212); + setState(211); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(213); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(214); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(219); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1114,48 +1112,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(234); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(220); + setState(219); valueExpression(); - setState(222); + setState(221); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(221); + setState(220); match(NOT); } } - setState(224); + setState(223); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(225); + setState(224); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(227); + setState(226); valueExpression(); - setState(229); + setState(228); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(228); + setState(227); match(NOT); } } - setState(231); + setState(230); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(232); + setState(231); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1209,11 +1207,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(236); + setState(235); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(237); + setState(236); match(COLON); - setState(238); + setState(237); ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } @@ -1297,14 +1295,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(245); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(240); + setState(239); operatorExpression(0); } break; @@ -1312,11 +1310,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(241); + setState(240); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(242); + setState(241); comparisonOperator(); - setState(243); + setState(242); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1441,7 +1439,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(251); + setState(250); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1450,7 +1448,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(248); + setState(247); primaryExpression(0); } break; @@ -1459,7 +1457,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(249); + setState(248); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1470,13 +1468,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(250); + setState(249); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(261); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1484,7 +1482,7 @@ private OperatorExpressionContext 
operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(259); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1492,9 +1490,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(253); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(254); + setState(253); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1505,7 +1503,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(255); + setState(254); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1514,9 +1512,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(257); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1527,14 +1525,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(258); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(263); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1692,7 +1690,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(272); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1701,7 +1699,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(265); + setState(264); constant(); } break; @@ -1710,7 +1708,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(266); + setState(265); qualifiedName(); } break; @@ -1719,7 +1717,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(267); + setState(266); functionExpression(); } break; @@ -1728,17 +1726,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(268); + setState(267); match(LP); - setState(269); + setState(268); booleanExpression(0); - setState(270); + setState(269); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(279); + setState(278); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1749,16 +1747,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(274); + setState(273); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(275); + setState(274); match(CAST_OP); - setState(276); + setState(275); dataType(); } } } - setState(281); + setState(280); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1820,37 +1818,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(282); + setState(281); functionName(); - setState(283); + setState(282); match(LP); - setState(293); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(284); + setState(283); match(ASTERISK); } break; case 2: { { - setState(285); + setState(284); booleanExpression(0); - setState(290); + setState(289); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(286); + setState(285); match(COMMA); - setState(287); + setState(286); booleanExpression(0); } } - setState(292); + setState(291); _errHandler.sync(this); _la = _input.LA(1); } @@ -1858,7 +1856,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(295); + setState(294); match(RP); } } @@ -1904,7 +1902,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(297); + setState(296); identifierOrParameter(); } } @@ -1962,7 +1960,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(299); + setState(298); identifier(); } } @@ -2009,9 +2007,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(301); + setState(300); match(ROW); - setState(302); + setState(301); fields(); } } @@ -2065,23 +2063,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(304); + setState(303); field(); - setState(309); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(305); + setState(304); match(COMMA); - setState(306); + setState(305); field(); } } } - setState(311); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2133,19 +2131,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(315); + setState(314); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(312); + setState(311); qualifiedName(); - setState(313); + setState(312); match(ASSIGN); } break; } - setState(317); + setState(316); booleanExpression(0); } } @@ -2203,34 +2201,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(319); + 
setState(318); match(FROM); - setState(320); + setState(319); indexPattern(); - setState(325); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(321); + setState(320); match(COMMA); - setState(322); + setState(321); indexPattern(); } } } - setState(327); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(329); + setState(328); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(328); + setState(327); metadata(); } break; @@ -2283,19 +2281,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(334); + setState(333); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(331); + setState(330); clusterString(); - setState(332); + setState(331); match(COLON); } break; } - setState(336); + setState(335); indexString(); } } @@ -2339,7 +2337,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(338); + setState(337); match(UNQUOTED_SOURCE); } } @@ -2385,7 +2383,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(340); + setState(339); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2440,20 +2438,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(344); + setState(343); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(342); + setState(341); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(343); + setState(342); deprecated_metadata(); } break; @@ -2510,25 +2508,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(346); + setState(345); match(METADATA); - setState(347); + setState(346); match(UNQUOTED_SOURCE); - setState(352); + setState(351); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(348); + setState(347); match(COMMA); - setState(349); + setState(348); match(UNQUOTED_SOURCE); } } } - setState(354); + setState(353); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2577,11 +2575,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(355); + setState(354); match(OPENING_BRACKET); - setState(356); + setState(355); metadataOption(); - setState(357); + setState(356); match(CLOSING_BRACKET); } } @@ -2645,46 +2643,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(359); + setState(358); match(DEV_METRICS); - setState(360); + setState(359); indexPattern(); - setState(365); + setState(364); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(361); + setState(360); match(COMMA); - setState(362); + setState(361); indexPattern(); } } } - setState(367); + setState(366); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(369); + setState(368); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(368); + setState(367); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(373); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(371); + setState(370); match(BY); - setState(372); + setState(371); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2734,9 +2732,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(375); + setState(374); match(EVAL); - setState(376); + setState(375); fields(); } } @@ -2789,26 +2787,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(378); + setState(377); match(STATS); - setState(380); + setState(379); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(379); + setState(378); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(384); + setState(383); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(382); + setState(381); match(BY); - setState(383); + setState(382); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2865,23 +2863,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(386); + setState(385); aggField(); - setState(391); + setState(390); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(387); + setState(386); match(COMMA); - setState(388); + setState(387); aggField(); } } } - setState(393); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2933,16 +2931,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(394); + setState(393); field(); - setState(397); + setState(396); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(395); + setState(394); match(WHERE); - setState(396); + setState(395); booleanExpression(0); } break; @@ -2999,23 +2997,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(399); + setState(398); identifierOrParameter(); - setState(404); + setState(403); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(400); + setState(399); match(DOT); - setState(401); + setState(400); identifierOrParameter(); } } } - setState(406); + setState(405); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3071,23 +3069,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - 
setState(407); + setState(406); identifierPattern(); - setState(412); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(408); + setState(407); match(DOT); - setState(409); + setState(408); identifierPattern(); } } } - setState(414); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3143,23 +3141,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(415); + setState(414); qualifiedNamePattern(); - setState(420); + setState(419); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(416); + setState(415); match(COMMA); - setState(417); + setState(416); qualifiedNamePattern(); } } } - setState(422); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3207,7 +3205,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(423); + setState(422); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3260,22 +3258,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(428); + setState(427); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(425); + setState(424); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(426); + setState(425); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(427); + setState(426); parameter(); } break; @@ -3548,14 +3546,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(472); + setState(471); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(430); + setState(429); match(NULL); } break; @@ -3563,9 +3561,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(431); + setState(430); integerValue(); - setState(432); + setState(431); match(UNQUOTED_IDENTIFIER); } break; @@ -3573,7 +3571,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(434); + setState(433); decimalValue(); } break; @@ -3581,7 +3579,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(435); + setState(434); integerValue(); } break; @@ -3589,7 +3587,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(436); + setState(435); booleanValue(); } break; @@ -3597,7 +3595,7 @@ public final 
ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(437); + setState(436); parameter(); } break; @@ -3605,7 +3603,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(438); + setState(437); string(); } break; @@ -3613,27 +3611,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(439); + setState(438); match(OPENING_BRACKET); - setState(440); + setState(439); numericValue(); - setState(445); + setState(444); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(441); + setState(440); match(COMMA); - setState(442); + setState(441); numericValue(); } } - setState(447); + setState(446); _errHandler.sync(this); _la = _input.LA(1); } - setState(448); + setState(447); match(CLOSING_BRACKET); } break; @@ -3641,27 +3639,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(450); + setState(449); match(OPENING_BRACKET); - setState(451); + setState(450); booleanValue(); - setState(456); + setState(455); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(452); + setState(451); match(COMMA); - setState(453); + setState(452); booleanValue(); } } - setState(458); + setState(457); _errHandler.sync(this); _la = _input.LA(1); } - setState(459); + setState(458); match(CLOSING_BRACKET); } break; @@ -3669,27 +3667,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(461); + setState(460); match(OPENING_BRACKET); - setState(462); + setState(461); string(); - setState(467); + setState(466); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(463); + setState(462); match(COMMA); - setState(464); + setState(463); string(); } } - setState(469); + setState(468); _errHandler.sync(this); _la = _input.LA(1); } - setState(470); + setState(469); match(CLOSING_BRACKET); } break; @@ -3763,14 +3761,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(476); + setState(475); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(474); + setState(473); match(PARAM); } break; @@ -3778,7 +3776,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(475); + setState(474); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3829,22 +3827,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(481); + setState(480); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(478); + setState(477); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - 
setState(479); + setState(478); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(480); + setState(479); parameter(); } break; @@ -3891,9 +3889,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(483); + setState(482); match(LIMIT); - setState(484); + setState(483); match(INTEGER_LITERAL); } } @@ -3948,25 +3946,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(486); + setState(485); match(SORT); - setState(487); + setState(486); orderExpression(); - setState(492); + setState(491); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(488); + setState(487); match(COMMA); - setState(489); + setState(488); orderExpression(); } } } - setState(494); + setState(493); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4022,14 +4020,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(494); booleanExpression(0); - setState(497); + setState(496); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(496); + setState(495); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4043,14 +4041,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(501); + setState(500); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(499); + setState(498); match(NULLS); - setState(500); + setState(499); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4109,9 +4107,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(502); match(KEEP); - setState(504); + setState(503); qualifiedNamePatterns(); } } @@ -4158,9 +4156,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(506); + setState(505); match(DROP); - setState(507); + setState(506); qualifiedNamePatterns(); } } @@ -4215,25 +4213,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(509); + setState(508); match(RENAME); - setState(510); + setState(509); renameClause(); - setState(515); + setState(514); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(511); + setState(510); match(COMMA); - setState(512); + setState(511); renameClause(); } } } - setState(517); + setState(516); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -4287,11 +4285,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(517); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(519); + setState(518); match(AS); - setState(520); + setState(519); 
((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4344,18 +4342,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(521); match(DISSECT); - setState(523); + setState(522); primaryExpression(0); - setState(524); + setState(523); string(); - setState(526); + setState(525); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(525); + setState(524); commandOptions(); } break; @@ -4408,11 +4406,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(528); + setState(527); match(GROK); - setState(529); + setState(528); primaryExpression(0); - setState(530); + setState(529); string(); } } @@ -4459,9 +4457,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(531); match(MV_EXPAND); - setState(533); + setState(532); qualifiedName(); } } @@ -4515,23 +4513,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(535); + setState(534); commandOption(); - setState(540); + setState(539); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(536); + setState(535); match(COMMA); - setState(537); + setState(536); commandOption(); } } } - setState(542); + setState(541); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4583,11 +4581,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(543); + setState(542); identifier(); - setState(544); + setState(543); match(ASSIGN); - setState(545); + setState(544); constant(); } } @@ -4633,7 +4631,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(547); + setState(546); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4688,20 +4686,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(551); + setState(550); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(549); + setState(548); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(550); + setState(549); integerValue(); } break; @@ -4750,12 +4748,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(554); + setState(553); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(553); + setState(552); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4768,7 +4766,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(556); + setState(555); match(DECIMAL_LITERAL); } } @@ -4815,12 +4813,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(559); + setState(558); _errHandler.sync(this); _la = 
_input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(558); + setState(557); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4833,7 +4831,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(561); + setState(560); match(INTEGER_LITERAL); } } @@ -4877,7 +4875,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(563); + setState(562); match(QUOTED_STRING); } } @@ -4927,7 +4925,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(565); + setState(564); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -4982,9 +4980,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(567); + setState(566); match(EXPLAIN); - setState(568); + setState(567); subqueryExpression(); } } @@ -5032,11 +5030,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(570); + setState(569); match(OPENING_BRACKET); - setState(571); + setState(570); query(0); - setState(572); + setState(571); match(CLOSING_BRACKET); } } @@ -5093,9 +5091,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(574); + setState(573); match(SHOW); - setState(575); + setState(574); match(INFO); } } @@ -5158,46 +5156,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(577); + setState(576); match(ENRICH); - setState(578); + setState(577); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(581); + setState(580); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(579); + setState(578); match(ON); - setState(580); + setState(579); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(592); + setState(591); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(583); + setState(582); match(WITH); - setState(584); + setState(583); enrichWithClause(); - setState(589); + setState(588); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(585); + setState(584); match(COMMA); - setState(586); + setState(585); enrichWithClause(); } } } - setState(591); + setState(590); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -5254,19 +5252,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(597); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(594); + setState(593); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(595); + setState(594); match(ASSIGN); } break; } - setState(599); + setState(598); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5319,13 +5317,13 @@ public final LookupCommandContext lookupCommand() throws 
RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(601); + setState(600); match(DEV_LOOKUP); - setState(602); + setState(601); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(603); + setState(602); match(ON); - setState(604); + setState(603); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5378,18 +5376,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(606); + setState(605); match(DEV_INLINESTATS); - setState(607); + setState(606); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(610); + setState(609); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(608); + setState(607); match(BY); - setState(609); + setState(608); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5447,12 +5445,12 @@ public final JoinCommandContext joinCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(613); + setState(612); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { { - setState(612); + setState(611); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { @@ -5466,11 +5464,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException { } } - setState(615); + setState(614); match(DEV_JOIN); - setState(616); + setState(615); joinTarget(); - setState(617); + setState(616); joinCondition(); } } @@ -5523,16 +5521,16 @@ public final JoinTargetContext joinTarget() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(619); + setState(618); ((JoinTargetContext)_localctx).index = identifier(); - setState(622); + setState(621); _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { - setState(620); + setState(619); match(AS); - setState(621); + setState(620); ((JoinTargetContext)_localctx).alias = identifier(); } } @@ -5590,25 +5588,25 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(624); + setState(623); match(ON); - setState(625); + setState(624); joinPredicate(); - setState(630); + setState(629); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(626); + setState(625); match(COMMA); - setState(627); + setState(626); joinPredicate(); } } } - setState(632); + setState(631); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } @@ -5656,7 +5654,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(633); + setState(632); valueExpression(); } } @@ -5720,47 +5718,45 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { case 5: - return this.isDevVersion(); - case 6: return precpred(_ctx, 5); - case 7: + case 6: return precpred(_ctx, 4); } return true; } private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 8: + case 7: return precpred(_ctx, 2); - case 9: + case 8: return precpred(_ctx, 1); } return 
true; } private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return precpred(_ctx, 1); } return true; } private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 11: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0001\u0080\u027c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0080\u027b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5789,377 +5785,377 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00d0\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005"+ - "\u0005\u00d8\b\u0005\n\u0005\f\u0005\u00db\t\u0005\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00df\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00e6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00eb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f6\b\b\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0003\t\u00fc\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0005\t\u0104\b\t\n\t\f\t\u0107\t\t\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0111\b\n\u0001\n\u0001"+ - "\n\u0001\n\u0005\n\u0116\b\n\n\n\f\n\u0119\t\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0121\b\u000b"+ - "\n\u000b\f\u000b\u0124\t\u000b\u0003\u000b\u0126\b\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0134\b\u000f\n\u000f"+ - "\f\u000f\u0137\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010"+ - "\u013c\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0005\u0011\u0144\b\u0011\n\u0011\f\u0011\u0147\t\u0011\u0001"+ - "\u0011\u0003\u0011\u014a\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ - "\u0012\u014f\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0159\b\u0015\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015f\b\u0016\n"+ - "\u0016\f\u0016\u0162\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016c"+ - "\b\u0018\n\u0018\f\u0018\u016f\t\u0018\u0001\u0018\u0003\u0018\u0172\b"+ - "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0176\b\u0018\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017d\b\u001a\u0001"+ - "\u001a\u0001\u001a\u0003\u001a\u0181\b\u001a\u0001\u001b\u0001\u001b\u0001"+ - 
"\u001b\u0005\u001b\u0186\b\u001b\n\u001b\f\u001b\u0189\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0003\u001c\u018e\b\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u0193\b\u001d\n\u001d\f\u001d\u0196\t\u001d\u0001"+ - "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u019b\b\u001e\n\u001e\f\u001e"+ - "\u019e\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a3\b"+ - "\u001f\n\u001f\f\u001f\u01a6\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ - "!\u0003!\u01ad\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bc\b\"\n"+ - "\"\f\"\u01bf\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01c7\b\"\n\"\f\"\u01ca\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0005\"\u01d2\b\"\n\"\f\"\u01d5\t\"\u0001\"\u0001\"\u0003\"\u01d9"+ - "\b\"\u0001#\u0001#\u0003#\u01dd\b#\u0001$\u0001$\u0001$\u0003$\u01e2\b"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01eb\b&\n&"+ - "\f&\u01ee\t&\u0001\'\u0001\'\u0003\'\u01f2\b\'\u0001\'\u0001\'\u0003\'"+ - "\u01f6\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0005*\u0202\b*\n*\f*\u0205\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0003,\u020f\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u021b\b/\n/\f/\u021e\t/\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0228\b2\u0001"+ - "3\u00033\u022b\b3\u00013\u00013\u00014\u00034\u0230\b4\u00014\u00014\u0001"+ - "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ - "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0246\b:\u0001"+ - ":\u0001:\u0001:\u0001:\u0005:\u024c\b:\n:\f:\u024f\t:\u0003:\u0251\b:"+ - "\u0001;\u0001;\u0001;\u0003;\u0256\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0263\b=\u0001>\u0003"+ - ">\u0266\b>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026f"+ - "\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0275\b@\n@\f@\u0278\t@\u0001A"+ - "\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006"+ - "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ - "02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000"+ - "@A\u0001\u0000BD\u0002\u0000\u001f\u001fQQ\u0001\u0000HI\u0002\u0000$"+ - "$((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016"+ - "\u0018\u0295\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ - "\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000"+ - "\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00cf\u0001\u0000\u0000"+ - "\u0000\f\u00ea\u0001\u0000\u0000\u0000\u000e\u00ec\u0001\u0000\u0000\u0000"+ - "\u0010\u00f5\u0001\u0000\u0000\u0000\u0012\u00fb\u0001\u0000\u0000\u0000"+ - "\u0014\u0110\u0001\u0000\u0000\u0000\u0016\u011a\u0001\u0000\u0000\u0000"+ - "\u0018\u0129\u0001\u0000\u0000\u0000\u001a\u012b\u0001\u0000\u0000\u0000"+ - "\u001c\u012d\u0001\u0000\u0000\u0000\u001e\u0130\u0001\u0000\u0000\u0000"+ - " \u013b\u0001\u0000\u0000\u0000\"\u013f\u0001\u0000\u0000\u0000$\u014e"+ - "\u0001\u0000\u0000\u0000&\u0152\u0001\u0000\u0000\u0000(\u0154\u0001\u0000"+ - "\u0000\u0000*\u0158\u0001\u0000\u0000\u0000,\u015a\u0001\u0000\u0000\u0000"+ - ".\u0163\u0001\u0000\u0000\u00000\u0167\u0001\u0000\u0000\u00002\u0177"+ - "\u0001\u0000\u0000\u00004\u017a\u0001\u0000\u0000\u00006\u0182\u0001\u0000"+ - 
"\u0000\u00008\u018a\u0001\u0000\u0000\u0000:\u018f\u0001\u0000\u0000\u0000"+ - "<\u0197\u0001\u0000\u0000\u0000>\u019f\u0001\u0000\u0000\u0000@\u01a7"+ - "\u0001\u0000\u0000\u0000B\u01ac\u0001\u0000\u0000\u0000D\u01d8\u0001\u0000"+ - "\u0000\u0000F\u01dc\u0001\u0000\u0000\u0000H\u01e1\u0001\u0000\u0000\u0000"+ - "J\u01e3\u0001\u0000\u0000\u0000L\u01e6\u0001\u0000\u0000\u0000N\u01ef"+ - "\u0001\u0000\u0000\u0000P\u01f7\u0001\u0000\u0000\u0000R\u01fa\u0001\u0000"+ - "\u0000\u0000T\u01fd\u0001\u0000\u0000\u0000V\u0206\u0001\u0000\u0000\u0000"+ - "X\u020a\u0001\u0000\u0000\u0000Z\u0210\u0001\u0000\u0000\u0000\\\u0214"+ - "\u0001\u0000\u0000\u0000^\u0217\u0001\u0000\u0000\u0000`\u021f\u0001\u0000"+ - "\u0000\u0000b\u0223\u0001\u0000\u0000\u0000d\u0227\u0001\u0000\u0000\u0000"+ - "f\u022a\u0001\u0000\u0000\u0000h\u022f\u0001\u0000\u0000\u0000j\u0233"+ - "\u0001\u0000\u0000\u0000l\u0235\u0001\u0000\u0000\u0000n\u0237\u0001\u0000"+ - "\u0000\u0000p\u023a\u0001\u0000\u0000\u0000r\u023e\u0001\u0000\u0000\u0000"+ - "t\u0241\u0001\u0000\u0000\u0000v\u0255\u0001\u0000\u0000\u0000x\u0259"+ - "\u0001\u0000\u0000\u0000z\u025e\u0001\u0000\u0000\u0000|\u0265\u0001\u0000"+ - "\u0000\u0000~\u026b\u0001\u0000\u0000\u0000\u0080\u0270\u0001\u0000\u0000"+ - "\u0000\u0082\u0279\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001"+ - "\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000"+ - "\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004"+ - "\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000"+ - "\u0000\u008b\u008c\u0005\u001e\u0000\u0000\u008c\u008e\u0003\u0006\u0003"+ - "\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000"+ - "\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000"+ - "\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ - "\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094"+ - "\u0099\u0003\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004"+ - "\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000"+ - "\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000"+ - "\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000"+ - "\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019"+ - "\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d"+ - "\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003"+ - "L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad"+ - "\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5"+ - "\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad"+ - "\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x"+ - "<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000"+ - "\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000"+ - "\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000"+ - "\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000"+ - "\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000"+ - 
"\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0"+ - "\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2"+ - "\u00b3\u00051\u0000\u0000\u00b3\u00d0\u0003\n\u0005\b\u00b4\u00d0\u0003"+ - "\u0010\b\u0000\u00b5\u00d0\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010"+ - "\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000"+ - "\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ - "\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc"+ - "\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0"+ - "\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001"+ - "\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001"+ - "\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+ - "\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00d0\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000"+ - "\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000"+ - "\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000"+ - "\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00d0\u0001\u0000\u0000\u0000\u00cd"+ - "\u00ce\u0004\u0005\u0005\u0000\u00ce\u00d0\u0003\u000e\u0007\u0000\u00cf"+ - "\u00b1\u0001\u0000\u0000\u0000\u00cf\u00b4\u0001\u0000\u0000\u0000\u00cf"+ - "\u00b5\u0001\u0000\u0000\u0000\u00cf\u00b6\u0001\u0000\u0000\u0000\u00cf"+ - "\u00c6\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00d0"+ - "\u00d9\u0001\u0000\u0000\u0000\u00d1\u00d2\n\u0005\u0000\u0000\u00d2\u00d3"+ - "\u0005#\u0000\u0000\u00d3\u00d8\u0003\n\u0005\u0006\u00d4\u00d5\n\u0004"+ - "\u0000\u0000\u00d5\u00d6\u00054\u0000\u0000\u00d6\u00d8\u0003\n\u0005"+ - "\u0005\u00d7\u00d1\u0001\u0000\u0000\u0000\u00d7\u00d4\u0001\u0000\u0000"+ - "\u0000\u00d8\u00db\u0001\u0000\u0000\u0000\u00d9\u00d7\u0001\u0000\u0000"+ - "\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da\u000b\u0001\u0000\u0000"+ - "\u0000\u00db\u00d9\u0001\u0000\u0000\u0000\u00dc\u00de\u0003\u0010\b\u0000"+ - "\u00dd\u00df\u00051\u0000\u0000\u00de\u00dd\u0001\u0000\u0000\u0000\u00de"+ - "\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0001\u0000\u0000\u0000\u00e0"+ - "\u00e1\u0005/\u0000\u0000\u00e1\u00e2\u0003j5\u0000\u00e2\u00eb\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e5\u0003\u0010\b\u0000\u00e4\u00e6\u00051"+ - "\u0000\u0000\u00e5\u00e4\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000"+ - "\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00e8\u00056\u0000"+ - "\u0000\u00e8\u00e9\u0003j5\u0000\u00e9\u00eb\u0001\u0000\u0000\u0000\u00ea"+ - "\u00dc\u0001\u0000\u0000\u0000\u00ea\u00e3\u0001\u0000\u0000\u0000\u00eb"+ - "\r\u0001\u0000\u0000\u0000\u00ec\u00ed\u0003:\u001d\u0000\u00ed\u00ee"+ - "\u0005\u001d\u0000\u0000\u00ee\u00ef\u0003D\"\u0000\u00ef\u000f\u0001"+ - "\u0000\u0000\u0000\u00f0\u00f6\u0003\u0012\t\u0000\u00f1\u00f2\u0003\u0012"+ - "\t\u0000\u00f2\u00f3\u0003l6\u0000\u00f3\u00f4\u0003\u0012\t\u0000\u00f4"+ - "\u00f6\u0001\u0000\u0000\u0000\u00f5\u00f0\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f1\u0001\u0000\u0000\u0000\u00f6\u0011\u0001\u0000\u0000\u0000\u00f7"+ - "\u00f8\u0006\t\uffff\uffff\u0000\u00f8\u00fc\u0003\u0014\n\u0000\u00f9"+ - "\u00fa\u0007\u0000\u0000\u0000\u00fa\u00fc\u0003\u0012\t\u0003\u00fb\u00f7"+ - "\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0105"+ - "\u0001\u0000\u0000\u0000\u00fd\u00fe\n\u0002\u0000\u0000\u00fe\u00ff\u0007"+ - 
"\u0001\u0000\u0000\u00ff\u0104\u0003\u0012\t\u0003\u0100\u0101\n\u0001"+ - "\u0000\u0000\u0101\u0102\u0007\u0000\u0000\u0000\u0102\u0104\u0003\u0012"+ - "\t\u0002\u0103\u00fd\u0001\u0000\u0000\u0000\u0103\u0100\u0001\u0000\u0000"+ - "\u0000\u0104\u0107\u0001\u0000\u0000\u0000\u0105\u0103\u0001\u0000\u0000"+ - "\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106\u0013\u0001\u0000\u0000"+ - "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u0109\u0006\n\uffff\uffff"+ - "\u0000\u0109\u0111\u0003D\"\u0000\u010a\u0111\u0003:\u001d\u0000\u010b"+ - "\u0111\u0003\u0016\u000b\u0000\u010c\u010d\u00050\u0000\u0000\u010d\u010e"+ - "\u0003\n\u0005\u0000\u010e\u010f\u00057\u0000\u0000\u010f\u0111\u0001"+ - "\u0000\u0000\u0000\u0110\u0108\u0001\u0000\u0000\u0000\u0110\u010a\u0001"+ - "\u0000\u0000\u0000\u0110\u010b\u0001\u0000\u0000\u0000\u0110\u010c\u0001"+ - "\u0000\u0000\u0000\u0111\u0117\u0001\u0000\u0000\u0000\u0112\u0113\n\u0001"+ - "\u0000\u0000\u0113\u0114\u0005&\u0000\u0000\u0114\u0116\u0003\u001a\r"+ - "\u0000\u0115\u0112\u0001\u0000\u0000\u0000\u0116\u0119\u0001\u0000\u0000"+ - "\u0000\u0117\u0115\u0001\u0000\u0000\u0000\u0117\u0118\u0001\u0000\u0000"+ - "\u0000\u0118\u0015\u0001\u0000\u0000\u0000\u0119\u0117\u0001\u0000\u0000"+ - "\u0000\u011a\u011b\u0003\u0018\f\u0000\u011b\u0125\u00050\u0000\u0000"+ - "\u011c\u0126\u0005B\u0000\u0000\u011d\u0122\u0003\n\u0005\u0000\u011e"+ - "\u011f\u0005\'\u0000\u0000\u011f\u0121\u0003\n\u0005\u0000\u0120\u011e"+ - "\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000\u0122\u0120"+ - "\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000\u0000\u0123\u0126"+ - "\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000\u0000\u0125\u011c"+ - "\u0001\u0000\u0000\u0000\u0125\u011d\u0001\u0000\u0000\u0000\u0125\u0126"+ - "\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0128"+ - "\u00057\u0000\u0000\u0128\u0017\u0001\u0000\u0000\u0000\u0129\u012a\u0003"+ - "H$\u0000\u012a\u0019\u0001\u0000\u0000\u0000\u012b\u012c\u0003@ \u0000"+ - "\u012c\u001b\u0001\u0000\u0000\u0000\u012d\u012e\u0005\f\u0000\u0000\u012e"+ - "\u012f\u0003\u001e\u000f\u0000\u012f\u001d\u0001\u0000\u0000\u0000\u0130"+ - "\u0135\u0003 \u0010\u0000\u0131\u0132\u0005\'\u0000\u0000\u0132\u0134"+ - "\u0003 \u0010\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0137\u0001"+ - "\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0135\u0136\u0001"+ - "\u0000\u0000\u0000\u0136\u001f\u0001\u0000\u0000\u0000\u0137\u0135\u0001"+ - "\u0000\u0000\u0000\u0138\u0139\u0003:\u001d\u0000\u0139\u013a\u0005%\u0000"+ - "\u0000\u013a\u013c\u0001\u0000\u0000\u0000\u013b\u0138\u0001\u0000\u0000"+ - "\u0000\u013b\u013c\u0001\u0000\u0000\u0000\u013c\u013d\u0001\u0000\u0000"+ - "\u0000\u013d\u013e\u0003\n\u0005\u0000\u013e!\u0001\u0000\u0000\u0000"+ - "\u013f\u0140\u0005\u0006\u0000\u0000\u0140\u0145\u0003$\u0012\u0000\u0141"+ - "\u0142\u0005\'\u0000\u0000\u0142\u0144\u0003$\u0012\u0000\u0143\u0141"+ - "\u0001\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143"+ - "\u0001\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0149"+ - "\u0001\u0000\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u014a"+ - "\u0003*\u0015\u0000\u0149\u0148\u0001\u0000\u0000\u0000\u0149\u014a\u0001"+ - "\u0000\u0000\u0000\u014a#\u0001\u0000\u0000\u0000\u014b\u014c\u0003&\u0013"+ - "\u0000\u014c\u014d\u0005\u001d\u0000\u0000\u014d\u014f\u0001\u0000\u0000"+ - "\u0000\u014e\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000\u0000"+ - 
"\u0000\u014f\u0150\u0001\u0000\u0000\u0000\u0150\u0151\u0003(\u0014\u0000"+ - "\u0151%\u0001\u0000\u0000\u0000\u0152\u0153\u0005Q\u0000\u0000\u0153\'"+ - "\u0001\u0000\u0000\u0000\u0154\u0155\u0007\u0002\u0000\u0000\u0155)\u0001"+ - "\u0000\u0000\u0000\u0156\u0159\u0003,\u0016\u0000\u0157\u0159\u0003.\u0017"+ - "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0157\u0001\u0000\u0000"+ - "\u0000\u0159+\u0001\u0000\u0000\u0000\u015a\u015b\u0005P\u0000\u0000\u015b"+ - "\u0160\u0005Q\u0000\u0000\u015c\u015d\u0005\'\u0000\u0000\u015d\u015f"+ - "\u0005Q\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0162\u0001"+ - "\u0000\u0000\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0160\u0161\u0001"+ - "\u0000\u0000\u0000\u0161-\u0001\u0000\u0000\u0000\u0162\u0160\u0001\u0000"+ - "\u0000\u0000\u0163\u0164\u0005F\u0000\u0000\u0164\u0165\u0003,\u0016\u0000"+ - "\u0165\u0166\u0005G\u0000\u0000\u0166/\u0001\u0000\u0000\u0000\u0167\u0168"+ - "\u0005\u0013\u0000\u0000\u0168\u016d\u0003$\u0012\u0000\u0169\u016a\u0005"+ - "\'\u0000\u0000\u016a\u016c\u0003$\u0012\u0000\u016b\u0169\u0001\u0000"+ - "\u0000\u0000\u016c\u016f\u0001\u0000\u0000\u0000\u016d\u016b\u0001\u0000"+ - "\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000\u016e\u0171\u0001\u0000"+ - "\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0172\u00036\u001b"+ - "\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ - "\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\"\u0000\u0000"+ - "\u0174\u0176\u0003\u001e\u000f\u0000\u0175\u0173\u0001\u0000\u0000\u0000"+ - "\u0175\u0176\u0001\u0000\u0000\u0000\u01761\u0001\u0000\u0000\u0000\u0177"+ - "\u0178\u0005\u0004\u0000\u0000\u0178\u0179\u0003\u001e\u000f\u0000\u0179"+ - "3\u0001\u0000\u0000\u0000\u017a\u017c\u0005\u000f\u0000\u0000\u017b\u017d"+ - "\u00036\u001b\u0000\u017c\u017b\u0001\u0000\u0000\u0000\u017c\u017d\u0001"+ - "\u0000\u0000\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ - "\"\u0000\u0000\u017f\u0181\u0003\u001e\u000f\u0000\u0180\u017e\u0001\u0000"+ - "\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u01815\u0001\u0000\u0000"+ - "\u0000\u0182\u0187\u00038\u001c\u0000\u0183\u0184\u0005\'\u0000\u0000"+ - "\u0184\u0186\u00038\u001c\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186"+ - "\u0189\u0001\u0000\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187"+ - "\u0188\u0001\u0000\u0000\u0000\u01887\u0001\u0000\u0000\u0000\u0189\u0187"+ - "\u0001\u0000\u0000\u0000\u018a\u018d\u0003 \u0010\u0000\u018b\u018c\u0005"+ - "\u0010\u0000\u0000\u018c\u018e\u0003\n\u0005\u0000\u018d\u018b\u0001\u0000"+ - "\u0000\u0000\u018d\u018e\u0001\u0000\u0000\u0000\u018e9\u0001\u0000\u0000"+ - "\u0000\u018f\u0194\u0003H$\u0000\u0190\u0191\u0005)\u0000\u0000\u0191"+ - "\u0193\u0003H$\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001"+ - "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001"+ - "\u0000\u0000\u0000\u0195;\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000"+ - "\u0000\u0000\u0197\u019c\u0003B!\u0000\u0198\u0199\u0005)\u0000\u0000"+ - "\u0199\u019b\u0003B!\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ - "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ - "\u0001\u0000\u0000\u0000\u019d=\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ - "\u0000\u0000\u0000\u019f\u01a4\u0003<\u001e\u0000\u01a0\u01a1\u0005\'"+ - "\u0000\u0000\u01a1\u01a3\u0003<\u001e\u0000\u01a2\u01a0\u0001\u0000\u0000"+ - "\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000"+ - 
"\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5?\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0003\u0000\u0000"+ - "\u01a8A\u0001\u0000\u0000\u0000\u01a9\u01ad\u0005U\u0000\u0000\u01aa\u01ab"+ - "\u0004!\u000b\u0000\u01ab\u01ad\u0003F#\u0000\u01ac\u01a9\u0001\u0000"+ - "\u0000\u0000\u01ac\u01aa\u0001\u0000\u0000\u0000\u01adC\u0001\u0000\u0000"+ - "\u0000\u01ae\u01d9\u00052\u0000\u0000\u01af\u01b0\u0003h4\u0000\u01b0"+ - "\u01b1\u0005H\u0000\u0000\u01b1\u01d9\u0001\u0000\u0000\u0000\u01b2\u01d9"+ - "\u0003f3\u0000\u01b3\u01d9\u0003h4\u0000\u01b4\u01d9\u0003b1\u0000\u01b5"+ - "\u01d9\u0003F#\u0000\u01b6\u01d9\u0003j5\u0000\u01b7\u01b8\u0005F\u0000"+ - "\u0000\u01b8\u01bd\u0003d2\u0000\u01b9\u01ba\u0005\'\u0000\u0000\u01ba"+ - "\u01bc\u0003d2\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001"+ - "\u0000\u0000\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001"+ - "\u0000\u0000\u0000\u01be\u01c0\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001"+ - "\u0000\u0000\u0000\u01c0\u01c1\u0005G\u0000\u0000\u01c1\u01d9\u0001\u0000"+ - "\u0000\u0000\u01c2\u01c3\u0005F\u0000\u0000\u01c3\u01c8\u0003b1\u0000"+ - "\u01c4\u01c5\u0005\'\u0000\u0000\u01c5\u01c7\u0003b1\u0000\u01c6\u01c4"+ - "\u0001\u0000\u0000\u0000\u01c7\u01ca\u0001\u0000\u0000\u0000\u01c8\u01c6"+ - "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01cb"+ - "\u0001\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01cc"+ - "\u0005G\u0000\u0000\u01cc\u01d9\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ - "F\u0000\u0000\u01ce\u01d3\u0003j5\u0000\u01cf\u01d0\u0005\'\u0000\u0000"+ - "\u01d0\u01d2\u0003j5\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d2\u01d5"+ - "\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d4"+ - "\u0001\u0000\u0000\u0000\u01d4\u01d6\u0001\u0000\u0000\u0000\u01d5\u01d3"+ - "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005G\u0000\u0000\u01d7\u01d9\u0001"+ - "\u0000\u0000\u0000\u01d8\u01ae\u0001\u0000\u0000\u0000\u01d8\u01af\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b2\u0001\u0000\u0000\u0000\u01d8\u01b3\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b4\u0001\u0000\u0000\u0000\u01d8\u01b5\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b6\u0001\u0000\u0000\u0000\u01d8\u01b7\u0001"+ - "\u0000\u0000\u0000\u01d8\u01c2\u0001\u0000\u0000\u0000\u01d8\u01cd\u0001"+ - "\u0000\u0000\u0000\u01d9E\u0001\u0000\u0000\u0000\u01da\u01dd\u00055\u0000"+ - "\u0000\u01db\u01dd\u0005E\u0000\u0000\u01dc\u01da\u0001\u0000\u0000\u0000"+ - "\u01dc\u01db\u0001\u0000\u0000\u0000\u01ddG\u0001\u0000\u0000\u0000\u01de"+ - "\u01e2\u0003@ \u0000\u01df\u01e0\u0004$\f\u0000\u01e0\u01e2\u0003F#\u0000"+ - "\u01e1\u01de\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000"+ - "\u01e2I\u0001\u0000\u0000\u0000\u01e3\u01e4\u0005\t\u0000\u0000\u01e4"+ - "\u01e5\u0005 \u0000\u0000\u01e5K\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ - "\u000e\u0000\u0000\u01e7\u01ec\u0003N\'\u0000\u01e8\u01e9\u0005\'\u0000"+ - "\u0000\u01e9\u01eb\u0003N\'\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ee\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000"+ - "\u01ec\u01ed\u0001\u0000\u0000\u0000\u01edM\u0001\u0000\u0000\u0000\u01ee"+ - "\u01ec\u0001\u0000\u0000\u0000\u01ef\u01f1\u0003\n\u0005\u0000\u01f0\u01f2"+ - "\u0007\u0004\u0000\u0000\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2"+ - "\u0001\u0000\u0000\u0000\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f4"+ - "\u00053\u0000\u0000\u01f4\u01f6\u0007\u0005\u0000\u0000\u01f5\u01f3\u0001"+ - 
"\u0000\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6O\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0005\b\u0000\u0000\u01f8\u01f9\u0003>\u001f"+ - "\u0000\u01f9Q\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005\u0002\u0000\u0000"+ - "\u01fb\u01fc\u0003>\u001f\u0000\u01fcS\u0001\u0000\u0000\u0000\u01fd\u01fe"+ - "\u0005\u000b\u0000\u0000\u01fe\u0203\u0003V+\u0000\u01ff\u0200\u0005\'"+ - "\u0000\u0000\u0200\u0202\u0003V+\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ - "\u0202\u0205\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000\u0000\u0000"+ - "\u0203\u0204\u0001\u0000\u0000\u0000\u0204U\u0001\u0000\u0000\u0000\u0205"+ - "\u0203\u0001\u0000\u0000\u0000\u0206\u0207\u0003<\u001e\u0000\u0207\u0208"+ - "\u0005Y\u0000\u0000\u0208\u0209\u0003<\u001e\u0000\u0209W\u0001\u0000"+ - "\u0000\u0000\u020a\u020b\u0005\u0001\u0000\u0000\u020b\u020c\u0003\u0014"+ - "\n\u0000\u020c\u020e\u0003j5\u0000\u020d\u020f\u0003^/\u0000\u020e\u020d"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020fY\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0005\u0007\u0000\u0000\u0211\u0212\u0003"+ - "\u0014\n\u0000\u0212\u0213\u0003j5\u0000\u0213[\u0001\u0000\u0000\u0000"+ - "\u0214\u0215\u0005\n\u0000\u0000\u0215\u0216\u0003:\u001d\u0000\u0216"+ - "]\u0001\u0000\u0000\u0000\u0217\u021c\u0003`0\u0000\u0218\u0219\u0005"+ - "\'\u0000\u0000\u0219\u021b\u0003`0\u0000\u021a\u0218\u0001\u0000\u0000"+ - "\u0000\u021b\u021e\u0001\u0000\u0000\u0000\u021c\u021a\u0001\u0000\u0000"+ - "\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d_\u0001\u0000\u0000\u0000"+ - "\u021e\u021c\u0001\u0000\u0000\u0000\u021f\u0220\u0003@ \u0000\u0220\u0221"+ - "\u0005%\u0000\u0000\u0221\u0222\u0003D\"\u0000\u0222a\u0001\u0000\u0000"+ - "\u0000\u0223\u0224\u0007\u0006\u0000\u0000\u0224c\u0001\u0000\u0000\u0000"+ - "\u0225\u0228\u0003f3\u0000\u0226\u0228\u0003h4\u0000\u0227\u0225\u0001"+ - "\u0000\u0000\u0000\u0227\u0226\u0001\u0000\u0000\u0000\u0228e\u0001\u0000"+ - "\u0000\u0000\u0229\u022b\u0007\u0000\u0000\u0000\u022a\u0229\u0001\u0000"+ - "\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000"+ - "\u0000\u0000\u022c\u022d\u0005!\u0000\u0000\u022dg\u0001\u0000\u0000\u0000"+ - "\u022e\u0230\u0007\u0000\u0000\u0000\u022f\u022e\u0001\u0000\u0000\u0000"+ - "\u022f\u0230\u0001\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000"+ - "\u0231\u0232\u0005 \u0000\u0000\u0232i\u0001\u0000\u0000\u0000\u0233\u0234"+ - "\u0005\u001f\u0000\u0000\u0234k\u0001\u0000\u0000\u0000\u0235\u0236\u0007"+ - "\u0007\u0000\u0000\u0236m\u0001\u0000\u0000\u0000\u0237\u0238\u0005\u0005"+ - "\u0000\u0000\u0238\u0239\u0003p8\u0000\u0239o\u0001\u0000\u0000\u0000"+ - "\u023a\u023b\u0005F\u0000\u0000\u023b\u023c\u0003\u0002\u0001\u0000\u023c"+ - "\u023d\u0005G\u0000\u0000\u023dq\u0001\u0000\u0000\u0000\u023e\u023f\u0005"+ - "\r\u0000\u0000\u023f\u0240\u0005i\u0000\u0000\u0240s\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0005\u0003\u0000\u0000\u0242\u0245\u0005_\u0000\u0000"+ - "\u0243\u0244\u0005]\u0000\u0000\u0244\u0246\u0003<\u001e\u0000\u0245\u0243"+ - "\u0001\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0250"+ - "\u0001\u0000\u0000\u0000\u0247\u0248\u0005^\u0000\u0000\u0248\u024d\u0003"+ - "v;\u0000\u0249\u024a\u0005\'\u0000\u0000\u024a\u024c\u0003v;\u0000\u024b"+ - "\u0249\u0001\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d"+ - "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250"+ - 
"\u0247\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0251"+ - "u\u0001\u0000\u0000\u0000\u0252\u0253\u0003<\u001e\u0000\u0253\u0254\u0005"+ - "%\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0252\u0001\u0000"+ - "\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000"+ - "\u0000\u0000\u0257\u0258\u0003<\u001e\u0000\u0258w\u0001\u0000\u0000\u0000"+ - "\u0259\u025a\u0005\u0012\u0000\u0000\u025a\u025b\u0003$\u0012\u0000\u025b"+ - "\u025c\u0005]\u0000\u0000\u025c\u025d\u0003>\u001f\u0000\u025dy\u0001"+ - "\u0000\u0000\u0000\u025e\u025f\u0005\u0011\u0000\u0000\u025f\u0262\u0003"+ - "6\u001b\u0000\u0260\u0261\u0005\"\u0000\u0000\u0261\u0263\u0003\u001e"+ - "\u000f\u0000\u0262\u0260\u0001\u0000\u0000\u0000\u0262\u0263\u0001\u0000"+ - "\u0000\u0000\u0263{\u0001\u0000\u0000\u0000\u0264\u0266\u0007\b\u0000"+ - "\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000"+ - "\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0014\u0000"+ - "\u0000\u0268\u0269\u0003~?\u0000\u0269\u026a\u0003\u0080@\u0000\u026a"+ - "}\u0001\u0000\u0000\u0000\u026b\u026e\u0003@ \u0000\u026c\u026d\u0005"+ - "Y\u0000\u0000\u026d\u026f\u0003@ \u0000\u026e\u026c\u0001\u0000\u0000"+ - "\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u007f\u0001\u0000\u0000"+ - "\u0000\u0270\u0271\u0005]\u0000\u0000\u0271\u0276\u0003\u0082A\u0000\u0272"+ - "\u0273\u0005\'\u0000\u0000\u0273\u0275\u0003\u0082A\u0000\u0274\u0272"+ - "\u0001\u0000\u0000\u0000\u0275\u0278\u0001\u0000\u0000\u0000\u0276\u0274"+ - "\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0081"+ - "\u0001\u0000\u0000\u0000\u0278\u0276\u0001\u0000\u0000\u0000\u0279\u027a"+ - "\u0003\u0010\b\u0000\u027a\u0083\u0001\u0000\u0000\u0000=\u008f\u0098"+ - "\u00ac\u00b8\u00c1\u00c9\u00cf\u00d7\u00d9\u00de\u00e5\u00ea\u00f5\u00fb"+ - "\u0103\u0105\u0110\u0117\u0122\u0125\u0135\u013b\u0145\u0149\u014e\u0158"+ - "\u0160\u016d\u0171\u0175\u017c\u0180\u0187\u018d\u0194\u019c\u01a4\u01ac"+ - "\u01bd\u01c8\u01d3\u01d8\u01dc\u01e1\u01ec\u01f1\u01f5\u0203\u020e\u021c"+ - "\u0227\u022a\u022f\u0245\u024d\u0250\u0255\u0262\u0265\u026e\u0276"; + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ + "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f5\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u00fb\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u0103\b\t\n\t\f\t\u0106\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0110\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0120\b\u000b\n\u000b"+ + "\f\u000b\u0123\t\u000b\u0003\u000b\u0125\b\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0133\b\u000f\n\u000f\f\u000f"+ + "\u0136\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u013b\b"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + 
"\u0011\u0005\u0011\u0143\b\u0011\n\u0011\f\u0011\u0146\t\u0011\u0001\u0011"+ + "\u0003\u0011\u0149\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012"+ + "\u014e\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0158\b\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015e\b\u0016\n\u0016"+ + "\f\u0016\u0161\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016b\b\u0018"+ + "\n\u0018\f\u0018\u016e\t\u0018\u0001\u0018\u0003\u0018\u0171\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0175\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017c\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u0180\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0185\b\u001b\n\u001b\f\u001b\u0188\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u018d\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u0192\b\u001d\n\u001d\f\u001d\u0195\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u019a\b\u001e\n\u001e\f\u001e\u019d\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a2\b\u001f\n\u001f"+ + "\f\u001f\u01a5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01ac"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f\"\u01be\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c6\b\"\n\""+ + "\f\"\u01c9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01d1\b\"\n\"\f\"\u01d4\t\"\u0001\"\u0001\"\u0003\"\u01d8\b\"\u0001#"+ + "\u0001#\u0003#\u01dc\b#\u0001$\u0001$\u0001$\u0003$\u01e1\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01ea\b&\n&\f&\u01ed\t&\u0001"+ + "\'\u0001\'\u0003\'\u01f1\b\'\u0001\'\u0001\'\u0003\'\u01f5\b\'\u0001("+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ + "*\u0201\b*\n*\f*\u0204\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u020e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u021a\b/\n/\f/\u021d\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0227\b2\u00013\u00033\u022a"+ + "\b3\u00013\u00013\u00014\u00034\u022f\b4\u00014\u00014\u00015\u00015\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0245\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u024b\b:\n:\f:\u024e\t:\u0003:\u0250\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0255\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0262\b=\u0001>\u0003>\u0265\b>\u0001"+ + ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026e\b?\u0001@\u0001"+ + "@\u0001@\u0001@\u0005@\u0274\b@\n@\f@\u0277\t@\u0001A\u0001A\u0001A\u0000"+ + "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "BD\u0002\u0000\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000"+ + "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u0294\u0000"+ + "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ + "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ + 
"\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ + "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f4\u0001"+ + "\u0000\u0000\u0000\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001"+ + "\u0000\u0000\u0000\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001"+ + "\u0000\u0000\u0000\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001"+ + "\u0000\u0000\u0000\u001e\u012f\u0001\u0000\u0000\u0000 \u013a\u0001\u0000"+ + "\u0000\u0000\"\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000"+ + "\u0000&\u0151\u0001\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*"+ + "\u0157\u0001\u0000\u0000\u0000,\u0159\u0001\u0000\u0000\u0000.\u0162\u0001"+ + "\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u00002\u0176\u0001\u0000\u0000"+ + "\u00004\u0179\u0001\u0000\u0000\u00006\u0181\u0001\u0000\u0000\u00008"+ + "\u0189\u0001\u0000\u0000\u0000:\u018e\u0001\u0000\u0000\u0000<\u0196\u0001"+ + "\u0000\u0000\u0000>\u019e\u0001\u0000\u0000\u0000@\u01a6\u0001\u0000\u0000"+ + "\u0000B\u01ab\u0001\u0000\u0000\u0000D\u01d7\u0001\u0000\u0000\u0000F"+ + "\u01db\u0001\u0000\u0000\u0000H\u01e0\u0001\u0000\u0000\u0000J\u01e2\u0001"+ + "\u0000\u0000\u0000L\u01e5\u0001\u0000\u0000\u0000N\u01ee\u0001\u0000\u0000"+ + "\u0000P\u01f6\u0001\u0000\u0000\u0000R\u01f9\u0001\u0000\u0000\u0000T"+ + "\u01fc\u0001\u0000\u0000\u0000V\u0205\u0001\u0000\u0000\u0000X\u0209\u0001"+ + "\u0000\u0000\u0000Z\u020f\u0001\u0000\u0000\u0000\\\u0213\u0001\u0000"+ + "\u0000\u0000^\u0216\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000"+ + "b\u0222\u0001\u0000\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u0229"+ + "\u0001\u0000\u0000\u0000h\u022e\u0001\u0000\u0000\u0000j\u0232\u0001\u0000"+ + "\u0000\u0000l\u0234\u0001\u0000\u0000\u0000n\u0236\u0001\u0000\u0000\u0000"+ + "p\u0239\u0001\u0000\u0000\u0000r\u023d\u0001\u0000\u0000\u0000t\u0240"+ + "\u0001\u0000\u0000\u0000v\u0254\u0001\u0000\u0000\u0000x\u0258\u0001\u0000"+ + "\u0000\u0000z\u025d\u0001\u0000\u0000\u0000|\u0264\u0001\u0000\u0000\u0000"+ + "~\u026a\u0001\u0000\u0000\u0000\u0080\u026f\u0001\u0000\u0000\u0000\u0082"+ + "\u0278\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ + "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ + "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ + "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ + "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ + "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ + "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ + "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ + "\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094\u0099\u0003"+ + "\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ + "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ + "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ + "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ + "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ + "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ + "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ + "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ + 
"\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ + "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ + "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ + "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ + "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ + "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ + "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ + "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ + "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ + "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ + "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ + "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ + ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ + "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ + "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ + "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ + "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ + "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ + "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ + "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ + "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ + "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ + "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ + "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ + "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ + "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ + "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ + "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ + "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ + 
"\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ec\u0003:"+ + "\u001d\u0000\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003D\"\u0000"+ + "\u00ee\u000f\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0"+ + "\u00f1\u0003\u0012\t\u0000\u00f1\u00f2\u0003l6\u0000\u00f2\u00f3\u0003"+ + "\u0012\t\u0000\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000"+ + "\u0000\u0000\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000"+ + "\u0000\u0000\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014"+ + "\n\u0000\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t"+ + "\u0003\u00fa\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000"+ + "\u0000\u00fb\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000"+ + "\u00fd\u00fe\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff"+ + "\u0100\n\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103"+ + "\u0003\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001"+ + "\u0000\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001"+ + "\u0000\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001"+ + "\u0000\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006"+ + "\n\uffff\uffff\u0000\u0108\u0110\u0003D\"\u0000\u0109\u0110\u0003:\u001d"+ + "\u0000\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000"+ + "\u010c\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e"+ + "\u0110\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f"+ + "\u0109\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f"+ + "\u010b\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111"+ + "\u0112\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115"+ + "\u0003\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ + "\u0000\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050"+ + "\u0000\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005"+ + "\u0000\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000"+ + "\u011f\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000"+ + "\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000"+ + "\u0122\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000"+ + "\u0124\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128"+ + "\u0129\u0003H$\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ + "@ \u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f\u0000"+ + "\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000\u0000"+ + "\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000\u0000"+ + "\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133"+ + "\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000\u0134"+ + "\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000\u0136"+ + "\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u0003:\u001d\u0000\u0138\u0139"+ + "\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137\u0001"+ + "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c\u0001"+ + 
"\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001\u0000"+ + "\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003$\u0012"+ + "\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012\u0000"+ + "\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000"+ + "\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000"+ + "\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000"+ + "\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000\u0148"+ + "\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a\u014b"+ + "\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001\u0000"+ + "\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ + "\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003(\u0014"+ + "\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000\u0000\u0152"+ + "\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000\u0154)"+ + "\u0001\u0000\u0000\u0000\u0155\u0158\u0003,\u0016\u0000\u0156\u0158\u0003"+ + ".\u0017\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0157\u0156\u0001\u0000"+ + "\u0000\u0000\u0158+\u0001\u0000\u0000\u0000\u0159\u015a\u0005P\u0000\u0000"+ + "\u015a\u015f\u0005Q\u0000\u0000\u015b\u015c\u0005\'\u0000\u0000\u015c"+ + "\u015e\u0005Q\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u0161"+ + "\u0001\u0000\u0000\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u015f\u0160"+ + "\u0001\u0000\u0000\u0000\u0160-\u0001\u0000\u0000\u0000\u0161\u015f\u0001"+ + "\u0000\u0000\u0000\u0162\u0163\u0005F\u0000\u0000\u0163\u0164\u0003,\u0016"+ + "\u0000\u0164\u0165\u0005G\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166"+ + "\u0167\u0005\u0013\u0000\u0000\u0167\u016c\u0003$\u0012\u0000\u0168\u0169"+ + "\u0005\'\u0000\u0000\u0169\u016b\u0003$\u0012\u0000\u016a\u0168\u0001"+ + "\u0000\u0000\u0000\u016b\u016e\u0001\u0000\u0000\u0000\u016c\u016a\u0001"+ + "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d\u0170\u0001"+ + "\u0000\u0000\u0000\u016e\u016c\u0001\u0000\u0000\u0000\u016f\u0171\u0003"+ + "6\u001b\u0000\u0170\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000"+ + "\u0000\u0000\u0171\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005!\u0000"+ + "\u0000\u0173\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u01751\u0001\u0000\u0000\u0000"+ + "\u0176\u0177\u0005\u0004\u0000\u0000\u0177\u0178\u0003\u001e\u000f\u0000"+ + "\u01783\u0001\u0000\u0000\u0000\u0179\u017b\u0005\u000f\u0000\u0000\u017a"+ + "\u017c\u00036\u001b\u0000\u017b\u017a\u0001\u0000\u0000\u0000\u017b\u017c"+ + "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017e"+ + "\u0005!\u0000\u0000\u017e\u0180\u0003\u001e\u000f\u0000\u017f\u017d\u0001"+ + "\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01805\u0001\u0000"+ + "\u0000\u0000\u0181\u0186\u00038\u001c\u0000\u0182\u0183\u0005\'\u0000"+ + "\u0000\u0183\u0185\u00038\u001c\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ + "\u0186\u0187\u0001\u0000\u0000\u0000\u01877\u0001\u0000\u0000\u0000\u0188"+ + "\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0003 \u0010\u0000\u018a\u018b"+ + "\u0005\u0010\u0000\u0000\u018b\u018d\u0003\n\u0005\u0000\u018c\u018a\u0001"+ + "\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d9\u0001\u0000"+ + "\u0000\u0000\u018e\u0193\u0003H$\u0000\u018f\u0190\u0005)\u0000\u0000"+ + 
"\u0190\u0192\u0003H$\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0195"+ + "\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0194"+ + "\u0001\u0000\u0000\u0000\u0194;\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ + "\u0000\u0000\u0000\u0196\u019b\u0003B!\u0000\u0197\u0198\u0005)\u0000"+ + "\u0000\u0198\u019a\u0003B!\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a"+ + "\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b"+ + "\u019c\u0001\u0000\u0000\u0000\u019c=\u0001\u0000\u0000\u0000\u019d\u019b"+ + "\u0001\u0000\u0000\u0000\u019e\u01a3\u0003<\u001e\u0000\u019f\u01a0\u0005"+ + "\'\u0000\u0000\u01a0\u01a2\u0003<\u001e\u0000\u01a1\u019f\u0001\u0000"+ + "\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4?\u0001\u0000\u0000"+ + "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0007\u0003\u0000"+ + "\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8\u01ac\u0005U\u0000\u0000\u01a9"+ + "\u01aa\u0004!\n\u0000\u01aa\u01ac\u0003F#\u0000\u01ab\u01a8\u0001\u0000"+ + "\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01acC\u0001\u0000\u0000"+ + "\u0000\u01ad\u01d8\u00052\u0000\u0000\u01ae\u01af\u0003h4\u0000\u01af"+ + "\u01b0\u0005H\u0000\u0000\u01b0\u01d8\u0001\u0000\u0000\u0000\u01b1\u01d8"+ + "\u0003f3\u0000\u01b2\u01d8\u0003h4\u0000\u01b3\u01d8\u0003b1\u0000\u01b4"+ + "\u01d8\u0003F#\u0000\u01b5\u01d8\u0003j5\u0000\u01b6\u01b7\u0005F\u0000"+ + "\u0000\u01b7\u01bc\u0003d2\u0000\u01b8\u01b9\u0005\'\u0000\u0000\u01b9"+ + "\u01bb\u0003d2\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001"+ + "\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001"+ + "\u0000\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0005G\u0000\u0000\u01c0\u01d8\u0001\u0000"+ + "\u0000\u0000\u01c1\u01c2\u0005F\u0000\u0000\u01c2\u01c7\u0003b1\u0000"+ + "\u01c3\u01c4\u0005\'\u0000\u0000\u01c4\u01c6\u0003b1\u0000\u01c5\u01c3"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c9\u0001\u0000\u0000\u0000\u01c7\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000\u0000\u01c8\u01ca"+ + "\u0001\u0000\u0000\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cb"+ + "\u0005G\u0000\u0000\u01cb\u01d8\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005"+ + "F\u0000\u0000\u01cd\u01d2\u0003j5\u0000\u01ce\u01cf\u0005\'\u0000\u0000"+ + "\u01cf\u01d1\u0003j5\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d4"+ + "\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d2"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005G\u0000\u0000\u01d6\u01d8\u0001"+ + "\u0000\u0000\u0000\u01d7\u01ad\u0001\u0000\u0000\u0000\u01d7\u01ae\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b1\u0001\u0000\u0000\u0000\u01d7\u01b2\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b3\u0001\u0000\u0000\u0000\u01d7\u01b4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b5\u0001\u0000\u0000\u0000\u01d7\u01b6\u0001"+ + "\u0000\u0000\u0000\u01d7\u01c1\u0001\u0000\u0000\u0000\u01d7\u01cc\u0001"+ + "\u0000\u0000\u0000\u01d8E\u0001\u0000\u0000\u0000\u01d9\u01dc\u00055\u0000"+ + "\u0000\u01da\u01dc\u0005E\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000"+ + "\u01db\u01da\u0001\u0000\u0000\u0000\u01dcG\u0001\u0000\u0000\u0000\u01dd"+ + "\u01e1\u0003@ \u0000\u01de\u01df\u0004$\u000b\u0000\u01df\u01e1\u0003"+ + "F#\u0000\u01e0\u01dd\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000\u0000"+ + 
"\u0000\u01e1I\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\t\u0000\u0000"+ + "\u01e3\u01e4\u0005\u001f\u0000\u0000\u01e4K\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0005\u000e\u0000\u0000\u01e6\u01eb\u0003N\'\u0000\u01e7\u01e8"+ + "\u0005\'\u0000\u0000\u01e8\u01ea\u0003N\'\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecM\u0001\u0000\u0000"+ + "\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ee\u01f0\u0003\n\u0005\u0000"+ + "\u01ef\u01f1\u0007\u0004\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f4\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f3\u00053\u0000\u0000\u01f3\u01f5\u0007\u0005\u0000\u0000\u01f4"+ + "\u01f2\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "O\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\b\u0000\u0000\u01f7\u01f8"+ + "\u0003>\u001f\u0000\u01f8Q\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005\u0002"+ + "\u0000\u0000\u01fa\u01fb\u0003>\u001f\u0000\u01fbS\u0001\u0000\u0000\u0000"+ + "\u01fc\u01fd\u0005\u000b\u0000\u0000\u01fd\u0202\u0003V+\u0000\u01fe\u01ff"+ + "\u0005\'\u0000\u0000\u01ff\u0201\u0003V+\u0000\u0200\u01fe\u0001\u0000"+ + "\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203U\u0001\u0000\u0000"+ + "\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e\u0000"+ + "\u0206\u0207\u0005Y\u0000\u0000\u0207\u0208\u0003<\u001e\u0000\u0208W"+ + "\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0001\u0000\u0000\u020a\u020b"+ + "\u0003\u0014\n\u0000\u020b\u020d\u0003j5\u0000\u020c\u020e\u0003^/\u0000"+ + "\u020d\u020c\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000"+ + "\u020eY\u0001\u0000\u0000\u0000\u020f\u0210\u0005\u0007\u0000\u0000\u0210"+ + "\u0211\u0003\u0014\n\u0000\u0211\u0212\u0003j5\u0000\u0212[\u0001\u0000"+ + "\u0000\u0000\u0213\u0214\u0005\n\u0000\u0000\u0214\u0215\u0003:\u001d"+ + "\u0000\u0215]\u0001\u0000\u0000\u0000\u0216\u021b\u0003`0\u0000\u0217"+ + "\u0218\u0005\'\u0000\u0000\u0218\u021a\u0003`0\u0000\u0219\u0217\u0001"+ + "\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b\u0219\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c_\u0001\u0000"+ + "\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e\u021f\u0003@ \u0000"+ + "\u021f\u0220\u0005$\u0000\u0000\u0220\u0221\u0003D\"\u0000\u0221a\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0007\u0006\u0000\u0000\u0223c\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0003f3\u0000\u0225\u0227\u0003h4\u0000\u0226"+ + "\u0224\u0001\u0000\u0000\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0227"+ + "e\u0001\u0000\u0000\u0000\u0228\u022a\u0007\u0000\u0000\u0000\u0229\u0228"+ + "\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0001\u0000\u0000\u0000\u022b\u022c\u0005 \u0000\u0000\u022cg\u0001\u0000"+ + "\u0000\u0000\u022d\u022f\u0007\u0000\u0000\u0000\u022e\u022d\u0001\u0000"+ + "\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000"+ + "\u0000\u0000\u0230\u0231\u0005\u001f\u0000\u0000\u0231i\u0001\u0000\u0000"+ + "\u0000\u0232\u0233\u0005\u001e\u0000\u0000\u0233k\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0007\u0007\u0000\u0000\u0235m\u0001\u0000\u0000\u0000\u0236"+ + "\u0237\u0005\u0005\u0000\u0000\u0237\u0238\u0003p8\u0000\u0238o\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0005F\u0000\u0000\u023a\u023b\u0003\u0002"+ + 
"\u0001\u0000\u023b\u023c\u0005G\u0000\u0000\u023cq\u0001\u0000\u0000\u0000"+ + "\u023d\u023e\u0005\r\u0000\u0000\u023e\u023f\u0005i\u0000\u0000\u023f"+ + "s\u0001\u0000\u0000\u0000\u0240\u0241\u0005\u0003\u0000\u0000\u0241\u0244"+ + "\u0005_\u0000\u0000\u0242\u0243\u0005]\u0000\u0000\u0243\u0245\u0003<"+ + "\u001e\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000"+ + "\u0000\u0000\u0245\u024f\u0001\u0000\u0000\u0000\u0246\u0247\u0005^\u0000"+ + "\u0000\u0247\u024c\u0003v;\u0000\u0248\u0249\u0005\'\u0000\u0000\u0249"+ + "\u024b\u0003v;\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001"+ + "\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001"+ + "\u0000\u0000\u0000\u024d\u0250\u0001\u0000\u0000\u0000\u024e\u024c\u0001"+ + "\u0000\u0000\u0000\u024f\u0246\u0001\u0000\u0000\u0000\u024f\u0250\u0001"+ + "\u0000\u0000\u0000\u0250u\u0001\u0000\u0000\u0000\u0251\u0252\u0003<\u001e"+ + "\u0000\u0252\u0253\u0005$\u0000\u0000\u0253\u0255\u0001\u0000\u0000\u0000"+ + "\u0254\u0251\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000\u0000"+ + "\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0003<\u001e\u0000\u0257"+ + "w\u0001\u0000\u0000\u0000\u0258\u0259\u0005\u0012\u0000\u0000\u0259\u025a"+ + "\u0003$\u0012\u0000\u025a\u025b\u0005]\u0000\u0000\u025b\u025c\u0003>"+ + "\u001f\u0000\u025cy\u0001\u0000\u0000\u0000\u025d\u025e\u0005\u0011\u0000"+ + "\u0000\u025e\u0261\u00036\u001b\u0000\u025f\u0260\u0005!\u0000\u0000\u0260"+ + "\u0262\u0003\u001e\u000f\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261"+ + "\u0262\u0001\u0000\u0000\u0000\u0262{\u0001\u0000\u0000\u0000\u0263\u0265"+ + "\u0007\b\u0000\u0000\u0264\u0263\u0001\u0000\u0000\u0000\u0264\u0265\u0001"+ + "\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0005"+ + "\u0014\u0000\u0000\u0267\u0268\u0003~?\u0000\u0268\u0269\u0003\u0080@"+ + "\u0000\u0269}\u0001\u0000\u0000\u0000\u026a\u026d\u0003@ \u0000\u026b"+ + "\u026c\u0005Y\u0000\u0000\u026c\u026e\u0003@ \u0000\u026d\u026b\u0001"+ + "\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u007f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0005]\u0000\u0000\u0270\u0275\u0003\u0082"+ + "A\u0000\u0271\u0272\u0005\'\u0000\u0000\u0272\u0274\u0003\u0082A\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000"+ + "\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000"+ + "\u0276\u0081\u0001\u0000\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000"+ + "\u0278\u0279\u0003\u0010\b\u0000\u0279\u0083\u0001\u0000\u0000\u0000="+ + "\u008f\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9"+ + "\u00f4\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148"+ + "\u014d\u0157\u015f\u016c\u0170\u0174\u017b\u017f\u0186\u018c\u0193\u019b"+ + "\u01a3\u01ab\u01bc\u01c7\u01d2\u01d7\u01db\u01e0\u01eb\u01f0\u01f4\u0202"+ + "\u020d\u021b\u0226\u0229\u022e\u0244\u024c\u024f\u0254\u0261\u0264\u026d"+ + "\u0275"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index a389923afee79..2770ed1f336ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2317,8 +2317,6 @@ public void 
testInvalidNamedParamsForIdentifierPatterns() { } public void testFromEnrichAndMatchColonUsage() { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - LogicalPlan plan = analyze(""" from *:test | EVAL x = to_string(languages) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index ca50ece7fa08b..06d8cb244ef19 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1159,8 +1159,6 @@ public void testMatchInsideEval() throws Exception { } public void testMatchFilter() throws Exception { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - assertEquals( "1:19: first argument of [salary:\"100\"] must be [string], found value [salary] type [integer]", error("from test | where salary:\"100\"") @@ -1190,7 +1188,6 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception { } public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") @@ -1271,7 +1268,6 @@ public void testMatchFunctionOnlyAllowedInWhere() throws Exception { } public void testMatchOperatornOnlyAllowedInWhere() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator"); } @@ -1317,8 +1313,6 @@ public void testMatchFunctionWithDisjunctions() { } public void testMatchOperatorWithDisjunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - checkWithDisjunctions(":", "first_name : \"Anna\"", "operator"); } @@ -1374,7 +1368,6 @@ public void testMatchFunctionWithNonBooleanFunctions() { } public void testMatchOperatorWithNonBooleanFunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator"); } @@ -1452,8 +1445,6 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { "1:68: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") ); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:62: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where first_name : \"Anna\"") @@ -1473,8 +1464,6 @@ public void testMatchFunctionNullArgs() throws Exception { public void testMatchTargetsExistingField() throws Exception { assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")")); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals("1:33: Unknown column [first_name]", error("from test | keep 
emp_no | where first_name : \"Anna\""));
     }

diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 181b8d52bf888..7802d74d2264f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.xpack.esql.core.util.NumericUtils;
 import org.elasticsearch.xpack.esql.core.util.StringUtils;
 import org.elasticsearch.xpack.esql.evaluator.EvalMapper;
+import org.elasticsearch.xpack.esql.expression.function.fulltext.Match;
 import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
@@ -130,7 +131,9 @@ public abstract class AbstractFunctionTestCase extends ESTestCase {
         entry("mod", Mod.class),
         entry("neg", Neg.class),
         entry("is_null", IsNull.class),
-        entry("is_not_null", IsNotNull.class)
+        entry("is_not_null", IsNotNull.class),
+        // Match operator is both a function and an operator
+        entry("match_operator", Match.class)
     );

     private static EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry().snapshotRegistry();
@@ -813,6 +816,10 @@ private static String buildSignatureSvg(String name) throws IOException {
         if (unaryOperator != null) {
             return RailRoadDiagram.unaryOperator(unaryOperator);
         }
+        String searchOperator = searchOperator(name);
+        if (searchOperator != null) {
+            return RailRoadDiagram.searchOperator(searchOperator);
+        }
         FunctionDefinition definition = definition(name);
         if (definition != null) {
             return RailRoadDiagram.functionSignature(definition);
@@ -862,7 +869,7 @@ public static void renderDocs() throws IOException {
             return;
         }
         String name = functionName();
-        if (binaryOperator(name) != null || unaryOperator(name) != null || likeOrInOperator(name)) {
+        if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) {
             renderDocsForOperators(name);
             return;
         }
@@ -1258,6 +1265,16 @@ private static String binaryOperator(String name) {
         };
     }

+    /**
+     * If this test is for a search operator return its symbol, otherwise return {@code null}.
+     */
+    private static String searchOperator(String name) {
+        return switch (name) {
+            case "match_operator" -> ":";
+            default -> null;
+        };
+    }
+
     /**
      * If this test is for a unary operator return its symbol, otherwise return {@code null}.
      * This is functionally the reverse of {@link ExpressionBuilder#visitArithmeticUnary}.
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
index df0737feadd8d..43e2ededeff0e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
@@ -89,6 +89,18 @@ static String binaryOperator(String operator) throws IOException {
         return toSvg(new Sequence(expressions.toArray(Expression[]::new)));
     }

+    /**
+     * Generate a railroad diagram for a search operator. The output would look like
+     * {@code field : value}.
+     */
+    static String searchOperator(String operator) throws IOException {
+        List<Expression> expressions = new ArrayList<>();
+        expressions.add(new Literal("field"));
+        expressions.add(new Syntax(operator));
+        expressions.add(new Literal("query"));
+        return toSvg(new Sequence(expressions.toArray(Expression[]::new)));
+    }
+
     /**
      * Generate a railroad diagram for unary operator. The output would look like
      * {@code -v}.
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java
new file mode 100644
index 0000000000000..32e9670286ef7
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.fulltext;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.FunctionName;
+import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.function.Supplier;
+
+/**
+ * This class is only used to generate docs for the match operator - all testing is done in {@link MatchTests}
+ */
+@FunctionName("match_operator")
+public class MatchOperatorTests extends MatchTests {
+
+    public MatchOperatorTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
+        super(testCaseSupplier);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() {
+        // Have a minimal test so that we can generate the appropriate types in the docs
+        List<TestCaseSupplier> suppliers = new LinkedList<>();
+        addPositiveTestCase(List.of(DataType.KEYWORD, DataType.KEYWORD), suppliers);
+        addPositiveTestCase(List.of(DataType.TEXT, DataType.TEXT), suppliers);
+        addPositiveTestCase(List.of(DataType.KEYWORD, DataType.TEXT), suppliers);
+        addPositiveTestCase(List.of(DataType.TEXT, DataType.KEYWORD), suppliers);
+        return parameterSuppliersFromTypedData(suppliers);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
index 6d0c45a972299..6a4a7404135f9 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java
@@ -36,19 +36,11 @@ public MatchTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCase

     @ParametersFactory
     public static Iterable<Object[]> parameters() {
-        Set<DataType> supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT);
-        Set<DataType> supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER);
-        Set<DataType> supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT);
-        List<Set<DataType>> supportedPerPosition = List.of(
-            supportedTextParams,
-            supportedTextParams,
-            supportedNumericParams,
-            supportedFuzzinessParams
-        );
+        List<Set<DataType>> supportedPerPosition = supportedParams();
         List<TestCaseSupplier> suppliers = new LinkedList<>();
         for (DataType fieldType : DataType.stringTypes()) {
             for (DataType queryType : DataType.stringTypes()) {
-                addPositiveTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers);
+                addPositiveTestCase(List.of(fieldType, queryType), suppliers);
                 addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers);
             }
         }
@@ -61,11 +53,20 @@ public static Iterable<Object[]> parameters() {
         );
     }

-    private static void addPositiveTestCase(
-        List<DataType> paramDataTypes,
-        List<Set<DataType>> supportedPerPosition,
-        List<TestCaseSupplier> suppliers
-    ) {
+    protected static List<Set<DataType>> supportedParams() {
+        Set<DataType> supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT);
+        Set<DataType> supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER);
+        Set<DataType> supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT);
+        List<Set<DataType>> supportedPerPosition = List.of(
+            supportedTextParams,
+            supportedTextParams,
+            supportedNumericParams,
+            supportedFuzzinessParams
+        );
+        return supportedPerPosition;
+    }
+
+    protected static void addPositiveTestCase(List<DataType> paramDataTypes, List<TestCaseSupplier> suppliers) {
         // Positive case - creates an ES field from the field parameter type
         suppliers.add(
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
index 2134e16b00131..269b4806680a6 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
 import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats;
-import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.analysis.Analyzer;
 import org.elasticsearch.xpack.esql.analysis.AnalyzerContext;
 import org.elasticsearch.xpack.esql.analysis.EnrichResolution;
@@ -1093,8 +1092,6 @@ public void testMissingFieldsDoNotGetExtracted() {
      *   estimatedRowSize[324]
      */
     public void testSingleMatchFilterPushdown() {
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-
         var plan = plannerOptimizer.plan("""
             from test
             | where first_name:"Anna"
@@ -1125,8 +1122,6 @@ public void testSingleMatchFilterPushdown() {
      * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336]
      */
     public void testMultipleMatchFilterPushdown() {
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-
         String query = """
             from test
             | where first_name:"Anna" and first_name:"Anneke"
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
index 4c1a9228b26e2..7b6c0048f2980 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
@@ -2300,7 +2300,6 @@ public void testMetricWithGroupKeyAsAgg() {
     }

     public void testMatchOperatorConstantQueryString() {
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
         var plan = statement("FROM test | WHERE field:\"value\"");
         var filter = as(plan, Filter.class);
         var match = (Match) filter.condition();
@@ -2310,7 +2309,6 @@ public void testMatchOperatorConstantQueryString() {
     }

     public void testInvalidMatchOperator() {
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
         expectError("from test | WHERE field:", "line 1:25: mismatched input '<EOF>' expecting {QUOTED_STRING, ");
         expectError(
             "from test | WHERE field:CONCAT(\"hello\", \"world\")",

From 8c20ac5884158b88fdd598e422db632e1734aabb Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Thu, 21 Nov 2024 08:00:05 +0100
Subject: [PATCH 124/386] [Build] Make test cluster plugin configuration cache
 compatible (#116890)

This adds infrastructure to make the legacy test cluster plugin and the
legacy test cluster based test plugins generally configuration cache
compatible.
---
 .../gradle/internal/AntFixtureStop.groovy          | 17 ++--
 .../gradle/internal/AntTask.groovy                 | 10 +-
 .../gradle/internal/test/AntFixture.groovy         | 60 +++++++++---
 .../gradle/internal/test/Fixture.java              | 21 ----
 .../AbstractYamlRestCompatTestPlugin.java          |  4 +-
 .../testclusters/ElasticsearchCluster.java         | 15 +++
 .../testclusters/ElasticsearchNode.java            | 15 ++-
 .../gradle/testclusters/TestClusterInfo.java       | 36 +++++++
 .../testclusters/TestClusterValueSource.java       | 34 +++++++
 .../testclusters/TestClustersAware.java            | 14 ++-
 .../testclusters/TestClustersPlugin.java           | 47 +++++----
 .../testclusters/TestClustersRegistry.java         | 56 +++++++++--
 .../discovery-ec2/qa/amazon-ec2/build.gradle       | 24 +++--
 qa/mixed-cluster/build.gradle                      | 42 ++++++--
 .../downgrade-to-basic-license/build.gradle        | 51 ++++++++--
 .../plugin/ccr/qa/multi-cluster/build.gradle       | 96 +++++++++++++++----
 .../ccr/qa/non-compliant-license/build.gradle      | 34 ++++++-
 x-pack/plugin/ccr/qa/restart/build.gradle          | 57 +++++++++--
 x-pack/plugin/ccr/qa/security/build.gradle         | 61 ++++++++----
 x-pack/plugin/esql/build.gradle                    |  1 -
 .../plugin/sql/qa/jdbc/security/build.gradle       | 18 ++++
 .../qa/repository-old-versions/build.gradle        |  1 -
 22 files changed, 561 insertions(+), 153 deletions(-)
 delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java
 create mode 100644 build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java
 create mode 100644 build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java

diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy
index ad37fa9f02c8c..6c87149095186 100644
--- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy
+++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy
@@ -15,16 +15,12 @@
 import org.elasticsearch.gradle.internal.test.AntFixture
 import org.gradle.api.file.FileSystemOperations
 import org.gradle.api.file.ProjectLayout
 import org.gradle.api.provider.ProviderFactory
-import org.gradle.api.tasks.Internal
 import org.gradle.process.ExecOperations

 import javax.inject.Inject

 abstract class AntFixtureStop extends LoggedExec implements FixtureStop {

-    @Internal
-    AntFixture fixture
-
     @Inject
     AntFixtureStop(ProjectLayout projectLayout,
                    ExecOperations execOperations,
@@ -34,12
+30,12 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } void setFixture(AntFixture fixture) { - assert this.fixture == null - this.fixture = fixture; - final Object pid = "${-> this.fixture.pid}" - onlyIf("pidFile exists") { fixture.pidFile.exists() } + def pidFile = fixture.pidFile + def fixtureName = fixture.name + final Object pid = "${-> Integer.parseInt(pidFile.getText('UTF-8').trim())}" + onlyIf("pidFile exists") { pidFile.exists() } doFirst { - logger.info("Shutting down ${fixture.name} with pid ${pid}") + logger.info("Shutting down ${fixtureName} with pid ${pid}") } if (OS.current() == OS.WINDOWS) { @@ -51,9 +47,8 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } doLast { fileSystemOperations.delete { - it.delete(fixture.pidFile) + it.delete(pidFile) } } - this.fixture = fixture } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy index 81f21f8c62d86..01a3bdaee2337 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy @@ -29,11 +29,6 @@ import java.nio.charset.Charset */ public abstract class AntTask extends DefaultTask { - /** - * A buffer that will contain the output of the ant code run, - * if the output was not already written directly to stdout. - */ - public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() @Inject protected FileSystemOperations getFileSystemOperations() { @@ -57,6 +52,11 @@ public abstract class AntTask extends DefaultTask { // otherwise groovy replaces System.out, and you have no chance to debug // ant.saveStreams = false + /** + * A buffer that will contain the output of the ant code run, + * if the output was not already written directly to stdout. + */ + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO final PrintStream stream = useStdout() ? 
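
The common pattern in the test-cluster patches above is Gradle's ValueSource API: external state such as a fixture's ports file is read inside ValueSource#obtain(), so the value is computed lazily at execution time and the resulting Provider can be stored in the configuration cache without capturing Project or Task state. A minimal, self-contained sketch of that pattern follows; the PortFileValueSource class is hypothetical and not part of these patches, standing in for the AntFixture.AntFixtureValueSource and TestClusterValueSource types introduced above.

import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;

/**
 * Minimal sketch (hypothetical class): reads the "address:port" line a
 * fixture writes on startup. Because the read happens inside obtain(), it
 * runs only when the provider is queried at execution time, keeping the
 * task graph configuration cache compatible.
 */
public abstract class PortFileValueSource implements ValueSource<String, PortFileValueSource.Parameters> {

    public interface Parameters extends ValueSourceParameters {
        // The ports file written by the fixture once it has started
        RegularFileProperty getPortFile();
    }

    @Override
    public String obtain() {
        try {
            // First line of the ports file is the fixture's "address:port"
            return Files.readAllLines(getParameters().getPortFile().get().getAsFile().toPath()).get(0);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

A consumer would obtain a Provider<String> via providers.of(PortFileValueSource.class, spec -> spec.getParameters().getPortFile().set(portsFile)) and wire it into a task input, the same shape as getAddressAndPortSource() in AntFixture.groovy above.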
System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy index f2837ff40fb79..88a68f1194858 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy @@ -10,22 +10,37 @@ package org.elasticsearch.gradle.internal.test import org.elasticsearch.gradle.OS + import org.elasticsearch.gradle.internal.AntFixtureStop import org.elasticsearch.gradle.internal.AntTask +import org.elasticsearch.gradle.testclusters.TestClusterInfo +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry import org.gradle.api.GradleException +import org.gradle.api.file.ProjectLayout +import org.gradle.api.provider.Property +import org.gradle.api.provider.Provider +import org.gradle.api.provider.ProviderFactory +import org.gradle.api.provider.ValueSource +import org.gradle.api.provider.ValueSourceParameters +import org.gradle.api.tasks.Input import org.gradle.api.tasks.Internal import org.gradle.api.tasks.TaskProvider +import javax.inject.Inject + /** * A fixture for integration tests which runs in a separate process launched by Ant. */ -class AntFixture extends AntTask implements Fixture { +class AntFixture extends AntTask { /** The path to the executable that starts the fixture. */ @Internal String executable private final List arguments = new ArrayList<>() + private ProjectLayout projectLayout + private final ProviderFactory providerFactory void args(Object... args) { arguments.addAll(args) @@ -69,19 +84,14 @@ class AntFixture extends AntTask implements Fixture { return tmpFile.exists() } - private final TaskProvider stopTask - - AntFixture() { - stopTask = createStopTask() + @Inject + AntFixture(ProjectLayout projectLayout, ProviderFactory providerFactory) { + this.providerFactory = providerFactory + this.projectLayout = projectLayout; + TaskProvider stopTask = createStopTask() finalizedBy(stopTask) } - @Override - @Internal - TaskProvider getStopTask() { - return stopTask - } - @Override protected void runAnt(AntBuilder ant) { // reset everything @@ -231,7 +241,7 @@ class AntFixture extends AntTask implements Fixture { */ @Internal protected File getBaseDir() { - return new File(project.buildDir, "fixtures/${name}") + return new File(projectLayout.getBuildDirectory().getAsFile().get(), "fixtures/${name}") } /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */ @@ -242,7 +252,7 @@ class AntFixture extends AntTask implements Fixture { /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */ @Internal - protected File getPidFile() { + File getPidFile() { return new File(baseDir, 'pid') } @@ -264,6 +274,12 @@ class AntFixture extends AntTask implements Fixture { return portsFile.readLines("UTF-8").get(0) } + @Internal + Provider getAddressAndPortProvider() { + File thePortFile = portsFile + return providerFactory.provider(() -> thePortFile.readLines("UTF-8").get(0)) + } + /** Returns a file that wraps around the actual command when {@code spawn == true}. 
*/ @Internal protected File getWrapperScript() { @@ -281,4 +297,22 @@ class AntFixture extends AntTask implements Fixture { protected File getRunLog() { return new File(cwd, 'run.log') } + + @Internal + Provider getAddressAndPortSource() { + return providerFactory.of(AntFixtureValueSource.class, spec -> { + spec.getParameters().getPortFile().set(portsFile); + }); + } + + static abstract class AntFixtureValueSource implements ValueSource { + @Override + String obtain() { + return getParameters().getPortFile().map { it.readLines("UTF-8").get(0) }.get() + } + + interface Parameters extends ValueSourceParameters { + Property getPortFile(); + } + } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java deleted file mode 100644 index f7ee88c715dfa..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.gradle.internal.test; - -/** - * Any object that can produce an accompanying stop task, meant to tear down - * a previously instantiated service. - */ -public interface Fixture { - - /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. 
*/ - Object getStopTask(); - -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index 61dea47eb15c1..ca669276123b3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.Directory; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RelativePath; import org.gradle.api.internal.file.FileOperations; @@ -244,10 +245,11 @@ public void apply(Project project) { yamlRestCompatTestTask.configure(testTask -> { testTask.systemProperty("tests.restCompat", true); // Use test runner and classpath from "normal" yaml source set + FileCollection outputFileCollection = yamlCompatTestSourceSet.getOutput(); testTask.setTestClassesDirs( yamlTestSourceSet.getOutput().getClassesDirs().plus(yamlCompatTestSourceSet.getOutput().getClassesDirs()) ); - testTask.onlyIf("Compatibility tests are available", t -> yamlCompatTestSourceSet.getOutput().isEmpty() == false); + testTask.onlyIf("Compatibility tests are available", t -> outputFileCollection.isEmpty() == false); testTask.setClasspath( yamlCompatTestSourceSet.getRuntimeClasspath() // remove the "normal" api and tests diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index ec341ecfd8b79..77393fe16b4c2 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -76,6 +76,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final LinkedHashMap> waitConditions = new LinkedHashMap<>(); private final transient Project project; private final Provider reaper; + private final Provider testClustersRegistryProvider; private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -87,11 +88,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private boolean shared = false; + private int claims = 0; + public ElasticsearchCluster( String path, String clusterName, Project project, Provider reaper, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -104,6 +108,7 @@ public ElasticsearchCluster( this.clusterName = clusterName; this.project = project; this.reaper = reaper; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -120,6 +125,7 @@ public ElasticsearchCluster( clusterName + "-0", project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -177,6 +183,7 @@ public void setNumberOfNodes(int 
numberOfNodes) { clusterName + "-" + i, project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -408,6 +415,7 @@ public void setPreserveDataDir(boolean preserveDataDir) { public void freeze() { nodes.forEach(ElasticsearchNode::freeze); configurationFrozen.set(true); + nodes.whenObjectAdded(node -> { throw new IllegalStateException("Cannot add nodes to test cluster after it has been frozen"); }); } private void checkFrozen() { @@ -663,4 +671,11 @@ public String toString() { return "cluster{" + path + ":" + clusterName + "}"; } + int addClaim() { + return ++this.claims; + } + + int removeClaim() { + return --this.claims; + } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index df11733928f0f..90162591cfcef 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -124,6 +124,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final String name; transient private final Project project; private final Provider reaperServiceProvider; + private final Provider testClustersRegistryProvider; + private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -164,7 +166,6 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final List distributions = new ArrayList<>(); private int currentDistro = 0; private TestDistribution testDistribution; - private volatile Process esProcess; private Function nameCustomization = s -> s; private boolean isWorkingDirConfigured = false; private String httpPort = "0"; @@ -179,6 +180,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { String name, Project project, Provider reaperServiceProvider, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -191,6 +193,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.name = name; this.project = project; this.reaperServiceProvider = reaperServiceProvider; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -892,11 +895,13 @@ private void startElasticsearchProcess() { } } LOGGER.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment); + Process esProcess; try { esProcess = processBuilder.start(); } catch (IOException e) { throw new TestClustersException("Failed to start ES process for " + this, e); } + testClustersRegistryProvider.get().storeProcess(id(), esProcess); reaperServiceProvider.get().registerPid(toString(), esProcess.pid()); } @@ -982,6 +987,7 @@ public synchronized void stop(boolean tailLogs) { } catch (IOException e) { throw new UncheckedIOException(e); } + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); if (esProcess == null && tailLogs) { // This is a special case. If start() throws an exception the plugin will still call stop // Another exception here would eat the original.
@@ -1574,6 +1580,7 @@ public List getFeatureFlags() { @Override @Internal public boolean isProcessAlive() { + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?"); return esProcess.isAlive(); } @@ -1602,6 +1609,10 @@ public int hashCode() { @Override public String toString() { + return id() + " (" + System.identityHashCode(this) + ")"; + } + + private String id() { return "node{" + path + ":" + name + "}"; } @@ -1702,7 +1713,7 @@ public CharSequence[] getArgs() { } } - private record FeatureFlag(String feature, Version from, Version until) { + public record FeatureFlag(String feature, Version from, Version until) { @Input public String getFeature() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java new file mode 100644 index 0000000000000..07663de7a9df9 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.gradle.testclusters; + +import java.io.File; +import java.util.List; + +public class TestClusterInfo { + private final List allHttpSocketURI; + private final List allTransportPortURI; + private final List auditLogs; + + public TestClusterInfo(List allHttpSocketURI, List allTransportPortURI, List auditLogs) { + this.allHttpSocketURI = allHttpSocketURI; + this.allTransportPortURI = allTransportPortURI; + this.auditLogs = auditLogs; + } + + public List getAllHttpSocketURI() { + return allHttpSocketURI; + } + + public List getAllTransportPortURI() { + return allTransportPortURI; + } + + public List getAuditLogs() { + return auditLogs; + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java new file mode 100644 index 0000000000000..8ecadcdc6d2b1 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.jetbrains.annotations.Nullable; + +public abstract class TestClusterValueSource implements ValueSource { + + @Nullable + @Override + public TestClusterInfo obtain() { + String clusterName = getParameters().getClusterName().get(); + String path = getParameters().getPath().get(); + return getParameters().getService().get().getClusterDetails(path, clusterName); + } + + interface Parameters extends ValueSourceParameters { + Property getClusterName(); + + Property getPath(); + + Property getService(); + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java index f84aa2a0389c2..9e5fc1f09ac9e 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.gradle.testclusters; +import org.elasticsearch.gradle.ElasticsearchDistribution; import org.gradle.api.Task; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -34,10 +35,15 @@ default void useCluster(ElasticsearchCluster cluster) { if (cluster.getPath().equals(getProject().getPath()) == false) { throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster); } - - cluster.getNodes() - .all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze())))); - dependsOn(cluster.getPluginAndModuleConfigurations()); + if (cluster.getName().equals(getName())) { + for (ElasticsearchNode node : cluster.getNodes()) { + for (ElasticsearchDistribution distro : node.getDistributions()) { + ElasticsearchDistribution frozenDistro = distro.maybeFreeze(); + dependsOn(frozenDistro); + } + } + dependsOn(cluster.getPluginAndModuleConfigurations()); + } getClusters().add(cluster); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 301782d52d1a3..ada31bc11a653 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.invocation.Gradle; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; @@ -106,15 +107,22 @@ public void apply(Project project) { runtimeJavaProvider = providerFactory.provider( () -> System.getenv("RUNTIME_JAVA_HOME") == null ? 
Jvm.current().getJavaHome() : new File(System.getenv("RUNTIME_JAVA_HOME")) ); + + // register cluster registry as a global build service + Provider testClustersRegistryProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); + // enable the DSL to describe clusters - NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaperServiceProvider); + NamedDomainObjectContainer container = createTestClustersContainerExtension( + project, + testClustersRegistryProvider, + reaperServiceProvider + ); // provide a task to be able to list defined clusters. createListClustersTask(project, container); - // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); - // register throttle so we only run at most max-workers/2 nodes concurrently Provider testClustersThrottleProvider = project.getGradle() .getSharedServices() @@ -145,6 +153,7 @@ private void configureArtifactTransforms(Project project) { private NamedDomainObjectContainer createTestClustersContainerExtension( Project project, + Provider testClustersRegistryProvider, Provider reaper ) { // Create an extensions that allows describing clusters @@ -155,6 +164,7 @@ private NamedDomainObjectContainer createTestClustersConta name, project, reaper, + testClustersRegistryProvider, getFileSystemOperations(), getArchiveOperations(), getExecOperations(), @@ -199,7 +209,9 @@ public void apply(Project project) { Provider testClusterTasksService = project.getGradle() .getSharedServices() - .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> {}); + .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> { + spec.getParameters().getRegistry().set(registryProvider); + }); TestClustersRegistry registry = registryProvider.get(); // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters @@ -209,7 +221,7 @@ public void apply(Project project) { configureClaimClustersHook(project.getGradle(), registry); // Before each task, we determine if a cluster needs to be started for that task. - configureStartClustersHook(project.getGradle(), registry, testClusterTasksService); + configureStartClustersHook(project.getGradle()); // After each task we determine if there are clusters that are no longer needed. 
getEventsListenerRegistry().onTaskCompletion(testClusterTasksService); @@ -228,12 +240,7 @@ private static void configureClaimClustersHook(Gradle gradle, TestClustersRegist }); } - private void configureStartClustersHook( - Gradle gradle, - TestClustersRegistry registry, - Provider testClusterTasksService - ) { - testClusterTasksService.get().registry(registry); + private void configureStartClustersHook(Gradle gradle) { gradle.getTaskGraph().whenReady(taskExecutionGraph -> { taskExecutionGraph.getAllTasks() .stream() @@ -249,19 +256,14 @@ private void configureStartClustersHook( } } - static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { + static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { Map tasksMap = new HashMap<>(); - private TestClustersRegistry registryProvider; public void register(TestClustersAware task) { tasksMap.put(task.getPath(), task); } - public void registry(TestClustersRegistry registry) { - this.registryProvider = registry; - } - @Override public void onFinish(FinishEvent finishEvent) { if (finishEvent instanceof TaskFinishEvent taskFinishEvent) { @@ -273,11 +275,18 @@ public void onFinish(FinishEvent finishEvent) { if (task.getDidWork()) { task.getClusters() .forEach( - cluster -> registryProvider.stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) + cluster -> getParameters().getRegistry() + .get() + .stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) ); } } } } + + // Parameters for this build service, exposing the shared test clusters registry + interface Params extends BuildServiceParameters { + Property getRegistry(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java index 8de0dd67b654c..8d2a9217e7d0c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java @@ -10,6 +10,8 @@ import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; @@ -17,20 +19,23 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; public abstract class TestClustersRegistry implements BuildService { private static final Logger logger = Logging.getLogger(TestClustersRegistry.class); private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); - private final Map claimsInventory = new HashMap<>(); - private final Set runningClusters = new HashSet<>(); + private final Map nodeProcesses = new HashMap<>(); + + @Inject + public abstract ProviderFactory getProviderFactory(); public void claimCluster(ElasticsearchCluster cluster) { - cluster.freeze(); - int claim = claimsInventory.getOrDefault(cluster, 0) + 1; - claimsInventory.put(cluster, claim); - if (claim > 1) { + int claims = cluster.addClaim(); + if (claims > 1) { cluster.setShared(true); } } @@ -43,6 +48,13 @@ public void maybeStartCluster(ElasticsearchCluster cluster) { cluster.start(); } + public Provider
getClusterInfo(String clusterName) { + return getProviderFactory().of(TestClusterValueSource.class, spec -> { + spec.getParameters().getService().set(TestClustersRegistry.this); + spec.getParameters().getClusterName().set(clusterName); + }); + } + public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { if (taskFailed) { // If the task fails, and other tasks use this cluster, the other task will likely never be @@ -67,8 +79,7 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { runningClusters.remove(cluster); } } else { - int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1; - claimsInventory.put(cluster, currentClaims); + int currentClaims = cluster.removeClaim(); if (currentClaims <= 0 && runningClusters.contains(cluster)) { cluster.stop(false); runningClusters.remove(cluster); @@ -76,4 +87,33 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { } } + public TestClusterInfo getClusterDetails(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + return new TestClusterInfo( + cluster.getAllHttpSocketURI(), + cluster.getAllTransportPortURI(), + cluster.getNodes().stream().map(n -> n.getAuditLog()).collect(Collectors.toList()) + ); + } + + public void restart(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + cluster.restart(); + } + + public void storeProcess(String id, Process esProcess) { + nodeProcesses.put(id, esProcess); + } + + public Process getProcess(String id) { + return nodeProcesses.get(id); + } } diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index aad59be376262..5f0fee6636256 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -8,7 +8,6 @@ */ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin @@ -55,8 +54,9 @@ tasks.named("yamlRestTest").configure { enabled = false } ['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> TaskProvider fixture = tasks.register("ec2Fixture${action}", AntFixture) { dependsOn project.sourceSets.yamlRestTest.runtimeClasspath - env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" - executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + FileCollection cp = project.sourceSets.yamlRestTest.runtimeClasspath + env 'CLASSPATH', "${-> cp.asPath}" + executable = "${buildParams.runtimeJavaHome.get() }/bin/java" args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt" } @@ -68,9 +68,18 @@ tasks.named("yamlRestTest").configure { enabled = false } classpath = yamlRestTestSourceSet.getRuntimeClasspath() } + if(action == 'ContainerCredentials') { + def addressAndPortSource = fixture.get().addressAndPortSource + testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { + 
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + () -> addressAndPortSource.map{ addr -> "http://${addr}/ecs_credentials_endpoint" }.get(), IGNORE_VALUE + } + } + tasks.named("check").configure { dependsOn(yamlRestTestTask) } + def addressAndPortSource = fixture.get().addressAndPortSource testClusters.matching { it.name == yamlRestTestTask.name}.configureEach { numberOfNodes = ec2NumberOfNodes @@ -78,9 +87,9 @@ tasks.named("yamlRestTest").configure { enabled = false } setting 'discovery.seed_providers', 'ec2' setting 'network.host', '_ec2_' - setting 'discovery.ec2.endpoint', { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + setting 'discovery.ec2.endpoint', { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE - systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE } } @@ -107,11 +116,6 @@ tasks.named("ec2FixtureContainerCredentials").configure { env 'ACTIVATE_CONTAINER_CREDENTIALS', true } -testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { - environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', - { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE -} - // Extra config for InstanceProfile tasks.named("ec2FixtureInstanceProfile").configure { env 'ACTIVATE_INSTANCE_PROFILE', true diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f6549a2d83fe6..d8f906b23d523 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -11,6 +11,10 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.testclusters.TestClustersPlugin apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -63,6 +67,8 @@ excludeList.add('indices.resolve_index/20_resolve_system_index/*') // Excluded because the error has changed excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') +def clusterPath = getPath() + buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { /* This project runs the core REST tests against a 4 node cluster where two of @@ -84,18 +90,42 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { useCluster baseCluster mustRunAfter("precommit") + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def baseInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterOneNodeUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } 
+ + def baseInfoAfterTwoNodesUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + def nonInputProps = nonInputProperties + def sharedRepoFolder = new File(buildDir, "cluster/shared/repo/${baseName}") doFirst { - delete("${buildDir}/cluster/shared/repo/${baseName}") + delete(sharedRepoFolder) // Getting the endpoints causes a wait for the cluster - println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Test cluster endpoints are: ${-> baseInfo.get().join(",")}" println "Upgrading one node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster - println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Upgrade complete, endpoints are: ${-> baseInfoAfterOneNodeUpdate.get().join(",")}" println "Upgrading another node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() - nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) - nonInputProperties.systemProperty('tests.clustername', baseName) + nonInputProps.systemProperty('tests.rest.cluster', baseInfoAfterTwoNodesUpdate.map(c -> c.join(","))) + nonInputProps.systemProperty('tests.clustername', baseName) if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,7 +133,7 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') - onlyIf("BWC tests disabled") { project.bwc_tests_enabled } +// onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } tasks.register(bwcTaskName(bwcVersion)) { diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index da39d221f92f1..ac8ce1b0fd331 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -1,5 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -11,6 +15,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register("leader-cluster") { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -24,7 +30,19 @@ def followCluster = testClusters.register("follow-cluster") { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, 
IGNORE_VALUE + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -41,7 +59,7 @@ tasks.register("writeJavaPolicy") { policyFile.write( [ "grant {", - " permission java.io.FilePermission \"${-> testClusters."follow-cluster".getFirstNode().getServerLog()}\", \"read\";", + " permission java.io.FilePermission \"${-> followCluster.map { it.getFirstNode().getServerLog() }.get()}\", \"read\";", "};" ].join("\n") ) @@ -50,11 +68,28 @@ tasks.register("writeJavaPolicy") { tasks.register("follow-cluster", RestIntegTestTask) { dependsOn 'writeJavaPolicy', "leader-cluster" - useCluster leaderCluster - systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) + useCluster leaderCluster + systemProperty 'tests.target_cluster', 'follow' + nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllHttpSocketURI().get(0) } + def followerUri = followInfo.map { it.getAllHttpSocketURI().get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index 2475a56aa87aa..86abbbbeedf6b 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -1,6 +1,10 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -12,6 +16,7 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() def leaderCluster = testClusters.register('leader-cluster') { 
testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -21,12 +26,23 @@ def leaderCluster = testClusters.register('leader-cluster') { } def middleCluster = testClusters.register('middle-cluster') { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -40,30 +56,74 @@ tasks.register("middle-cluster", RestIntegTestTask) { useCluster testClusters.named("leader-cluster") systemProperty 'tests.target_cluster', 'middle' systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host',leaderCluster.map(c -> c.allHttpSocketURI.get(0)) -} + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', leaderUri +} tasks.register('follow-cluster', RestIntegTestTask) { dependsOn "leader-cluster", "middle-cluster" - useCluster leaderCluster - useCluster middleCluster - systemProperty 'tests.target_cluster', 'follow' - systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'tests.middle_host', middleCluster.map(c -> c.allHttpSocketURI.get(0)) + useCluster leaderCluster + useCluster middleCluster + systemProperty 'tests.target_cluster', 'follow' + systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def middleUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + 
nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.middle_host', middleUri } -testClusters.matching {it.name == "follow-cluster" }.configureEach { +testClusters.matching { it.name == "follow-cluster" }.configureEach { testDistribution = 'DEFAULT' setting 'xpack.monitoring.collection.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + + def middleUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE setting 'cluster.remote.middle_cluster.seeds', - { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${middleUris.get().join(",")}\"" }, IGNORE_VALUE } diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle index 7661ea08b057d..ff342accef277 100644 --- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle @@ -1,5 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -10,6 +14,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa:') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'true' @@ -21,8 +27,20 @@ def followerCluster = testClusters.register('follow-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register('leader-cluster', RestIntegTestTask) { @@ -34,7 +52,19 @@ tasks.register('follow-cluster', 
RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', followerCluster.map(c -> c.allHttpSocketURI.get(0)) + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def followUri = followInfo.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', followUri } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle index 47d37801e2dcf..848beb1da10ae 100644 --- a/x-pack/plugin/ccr/qa/restart/build.gradle +++ b/x-pack/plugin/ccr/qa/restart/build.gradle @@ -1,6 +1,10 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -9,6 +13,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -22,12 +28,23 @@ def followCluster = testClusters.register('follow-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllTransportPortURI().get(0) } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE + { "\"${leaderUri.get()}\"" }, IGNORE_VALUE nameCustomization = { 'follow' } } - tasks.register('leader-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.target_cluster', 'leader' @@ -37,8 +54,19 @@ tasks.register('follow-cluster', RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', - "${-> leaderCluster.get().getAllHttpSocketURI().get(0)}" + "${-> 
leaderUri.get() }" } tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { @@ -48,10 +76,27 @@ tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { systemProperty 'tests.rest.load_packaged', 'false' systemProperty 'tests.target_cluster', 'follow-restart' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def followUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.join(",") } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.rest.cluster', followUris + doFirst { - followCluster.get().restart() - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c-> c.getAllHttpSocketURI().get(0)) - nonInputProperties.systemProperty 'tests.rest.cluster', followCluster.map(c -> c.getAllHttpSocketURI().join(",")) + serviceProvider.get().restart(clusterPath, "follow-cluster") } } diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle index 5515aefeaa091..454a9ae721736 100644 --- a/x-pack/plugin/ccr/qa/security/build.gradle +++ b/x-pack/plugin/ccr/qa/security/build.gradle @@ -1,4 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -10,26 +15,38 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leadCluster = testClusters.register('leader-cluster') { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - extraConfigFile 'roles.yml', file('leader-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + extraConfigFile 'roles.yml', file('leader-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } testClusters.register('follow-cluster') { - testDistribution = 'DEFAULT' - setting 'cluster.remote.leader_cluster.seeds', { - "\"${leadCluster.get().getAllTransportPortURI().join(",")}\"" - }, IGNORE_VALUE - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests - extraConfigFile 'roles.yml', file('follower-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + 
) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.AllTransportPortURI } + + setting 'cluster.remote.leader_cluster.seeds', { + "\"${leaderUris.get().join(",")}\"" + }, IGNORE_VALUE + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests + extraConfigFile 'roles.yml', file('follower-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } tasks.register('leader-cluster', RestIntegTestTask) { @@ -41,7 +58,17 @@ def followerClusterTestTask = tasks.register('follow-cluster', RestIntegTestTask dependsOn 'leader-cluster' useCluster leadCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', leadCluster.map(c-> c.getAllHttpSocketURI().get(0)) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri } tasks.named("check").configure { dependsOn(followerClusterTestTask) } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 6541fcd84afef..201863108a6c8 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -83,7 +83,6 @@ tasks.named("test").configure { } } File functionsFolder = file("build/testrun/test/temp/esql/functions") - File signatureFolder = file("build/testrun/test/temp/esql/functions/signature") File typesFolder = file("build/testrun/test/temp/esql/functions/types") def functionsDocFolder = file("${rootDir}/docs/reference/esql/functions") def effectiveProjectDir = projectDir diff --git a/x-pack/plugin/sql/qa/jdbc/security/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/build.gradle index c446755e91929..82510285cb996 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/build.gradle @@ -1,4 +1,8 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,7 +15,10 @@ dependencies { Project mainProject = project + subprojects { + def clusterPath = getPath() + // Use tests from the root security qa project in subprojects configurations.create('testArtifacts').transitive(false) @@ -46,6 +53,17 @@ subprojects { dependsOn copyTestClasses classpath += configurations.testArtifacts testClassesDirs = project.files(testArtifactsDir) + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("javaRestTest") + it.parameters.service = serviceProvider + } + 
nonInputProperties.systemProperty 'tests.audit.logfile', "${-> testClusters.javaRestTest.singleNode().getAuditLog()}" nonInputProperties.systemProperty 'tests.audit.yesterday.logfile', diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 78cfc0f688e4a..ecd02ac9d209f 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.transform.UnzipTransform From 8dd15125b7e709dbe5163dafd1b7af78d660ba53 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 21 Nov 2024 10:22:43 +0100 Subject: [PATCH 125/386] Bump 8.x version to 8.18.0 (#117234) --- server/src/main/java/org/elasticsearch/Version.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 7791ca200a785..40071b19af5d3 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -190,6 +190,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; From 8e6d5e2ab64264967a45083e2c5acf7936eb6dc8 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 21 Nov 2024 09:40:57 +0000 Subject: [PATCH 126/386] [CI][ML] Unmute InferenceRestIT (#117158) For #116542 From 2b91e7a7a3c354dfcacaf74fa147a6c3199c353d Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 21 Nov 2024 10:26:04 +0000 Subject: [PATCH 127/386] Remove `ValidateJoinRequest` (#117225) Since 8.3.0 (#85380) we have sent join-validation requests as a `BytesTransportRequest` to facilitate sharing these large messages (and the work needed to create them) amongst all nodes that join the cluster at around the same time. For BwC with versions earlier than 8.3.0 we use a `ValidateJoinRequest` class to represent the received data, whichever scheme it uses. We no longer need to maintain this compatibility, so we can use a bare `BytesTransportRequest` on both sender and receiver, and therefore drop the `ValidateJoinRequest` adapter and the special-cased assertion in `MockTransportService`. Relates #114808 which was reverted in #117200. 
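To make the scheme concrete: the sender serializes and compresses the cluster state once and ships the raw bytes, which the receiver decodes via the readClusterState helper added below. The following is a minimal sketch of the sending side, assuming Elasticsearch's stream and compression utilities; the helper name serializeClusterState and the plain BytesStreamOutput buffer are illustrative only, since the production code additionally caches the serialized bytes so that nodes joining at around the same time can share them.

    import java.io.IOException;
    import org.elasticsearch.TransportVersion;
    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.compress.CompressorFactory;
    import org.elasticsearch.common.io.Streams;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;

    // Illustrative sketch: compress a ClusterState into bytes that can be wrapped
    // in a BytesTransportRequest; the mirror image of readClusterState() below.
    static BytesReference serializeClusterState(ClusterState state, TransportVersion version) throws IOException {
        final var bytesStream = new BytesStreamOutput();
        try (
            var out = new OutputStreamStreamOutput(
                CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream))
            )
        ) {
            // Pin the wire format to the receiver's transport version; the receiver
            // restores the same version before calling ClusterState.readFrom.
            out.setTransportVersion(version);
            state.writeTo(out);
        }
        return bytesStream.bytes();
    }

The version must be fixed explicitly on both sides of the stream because the joining node may be on a different version from the master, so the cluster-wide minimum transport version cannot be used here.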
--- .../cluster/coordination/Coordinator.java | 1 + .../coordination/JoinValidationService.java | 21 ++++- .../coordination/ValidateJoinRequest.java | 92 ------------------- .../transport/BytesTransportRequest.java | 3 +- .../JoinValidationServiceTests.java | 64 +++++++++++-- .../test/transport/MockTransportService.java | 20 +--- 6 files changed, 78 insertions(+), 123 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 8a4464f194fc5..35b6eb1852237 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -246,6 +246,7 @@ public Coordinator( this.joinValidationService = new JoinValidationService( settings, transportService, + namedWriteableRegistry, this::getStateForJoinValidationService, () -> getLastAcceptedState().metadata(), this.onJoinValidators diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java index 7de7fd4d92d1b..9d5d74fa24648 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -106,6 +108,7 @@ public class JoinValidationService { public JoinValidationService( Settings settings, TransportService transportService, + NamedWriteableRegistry namedWriteableRegistry, Supplier clusterStateSupplier, Supplier metadataSupplier, Collection> joinValidators @@ -120,9 +123,9 @@ public JoinValidationService( transportService.registerRequestHandler( JoinValidationService.JOIN_VALIDATE_ACTION_NAME, this.responseExecutor, - ValidateJoinRequest::new, + BytesTransportRequest::new, (request, channel, task) -> { - final var remoteState = request.getOrReadState(); + final var remoteState = readClusterState(namedWriteableRegistry, request); final var remoteMetadata = remoteState.metadata(); final var localMetadata = metadataSupplier.get(); if (localMetadata.clusterUUIDCommitted() && localMetadata.clusterUUID().equals(remoteMetadata.clusterUUID()) == false) { @@ -145,6 +148,20 @@ public JoinValidationService( ); } + private static ClusterState readClusterState(NamedWriteableRegistry namedWriteableRegistry, BytesTransportRequest request) + throws IOException { + try ( + var bytesStreamInput = request.bytes().streamInput(); + var in = new NamedWriteableAwareStreamInput( + CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), + namedWriteableRegistry + ) + ) { + in.setTransportVersion(request.version()); + return ClusterState.readFrom(in, null); + } + } + public void validateJoin(DiscoveryNode discoveryNode, ActionListener listener) { // This node isn't in the cluster yet so 
ClusterState#getMinTransportVersion() doesn't apply, we must obtain a specific connection // so we can check its transport version to decide how to proceed. diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java deleted file mode 100644 index c81e4877196b3..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.cluster.coordination; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; -import org.elasticsearch.transport.TransportRequest; - -import java.io.IOException; - -public class ValidateJoinRequest extends TransportRequest { - private final CheckedSupplier stateSupplier; - private final RefCounted refCounted; - - public ValidateJoinRequest(StreamInput in) throws IOException { - super(in); - // recent versions send a BytesTransportRequest containing a compressed representation of the state - final var bytes = in.readReleasableBytesReference(); - final var version = in.getTransportVersion(); - final var namedWriteableRegistry = in.namedWriteableRegistry(); - this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry); - this.refCounted = bytes; - } - - private static ClusterState readCompressed( - TransportVersion version, - BytesReference bytes, - NamedWriteableRegistry namedWriteableRegistry - ) throws IOException { - try ( - var bytesStreamInput = bytes.streamInput(); - var in = new NamedWriteableAwareStreamInput( - CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), - namedWriteableRegistry - ) - ) { - in.setTransportVersion(version); - return ClusterState.readFrom(in, null); - } - } - - public ValidateJoinRequest(ClusterState state) { - this.stateSupplier = () -> state; - this.refCounted = null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - stateSupplier.get().writeTo(out); - } - - public ClusterState getOrReadState() throws IOException { - return stateSupplier.get(); - } - - @Override - public void incRef() { - if (refCounted != null) { - refCounted.incRef(); - } - } - - @Override - public boolean tryIncRef() { - return refCounted == null || refCounted.tryIncRef(); - } - - @Override - public boolean decRef() { - return refCounted != null && refCounted.decRef(); - } - - @Override - public boolean hasReferences() { - return refCounted == null || refCounted.hasReferences(); - } -} diff --git 
a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java index 7bf172388eccd..0db3de9abdb7b 100644 --- a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java +++ b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; import java.io.IOException; @@ -22,7 +21,7 @@ * A specialized, bytes only request, that can potentially be optimized on the network * layer, specifically for the same large buffer send to several nodes. */ -public class BytesTransportRequest extends TransportRequest implements RefCounted { +public class BytesTransportRequest extends TransportRequest { final ReleasableBytesReference bytes; private final TransportVersion version; diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 4518bd655346a..226f5dbf3b2ff 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -22,10 +22,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistryTests; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -38,6 +42,7 @@ import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TestTransportChannel; @@ -49,6 +54,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -155,6 +161,7 @@ public void doRun() { final var joinValidationService = new JoinValidationService( settings, transportService, + writableRegistry(), () -> usually() ? 
clusterState : null, clusterState::metadata, List.of() @@ -286,7 +293,14 @@ public void writeTo(StreamOutput out) {} ); // registers request handler - new JoinValidationService(Settings.EMPTY, joiningNodeTransportService, () -> clusterState, clusterState::metadata, List.of()); + new JoinValidationService( + Settings.EMPTY, + joiningNodeTransportService, + writableRegistry(), + () -> clusterState, + clusterState::metadata, + List.of() + ); joiningNodeTransportService.start(); joiningNodeTransportService.acceptIncomingRequests(); @@ -325,6 +339,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req final var joinValidationService = new JoinValidationService( Settings.EMPTY, masterTransportService, + writableRegistry(), () -> clusterState, clusterState::metadata, List.of() @@ -349,7 +364,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req } } - public void testJoinValidationRejectsMismatchedClusterUUID() { + public void testJoinValidationRejectsMismatchedClusterUUID() throws IOException { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); final var localNode = DiscoveryNodeUtils.create("node0"); @@ -371,7 +386,14 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { final var settings = Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), dataPath).build(); // registers request handler - new JoinValidationService(settings, transportService, () -> localClusterState, localClusterState::metadata, List.of()); + new JoinValidationService( + settings, + transportService, + writableRegistry(), + () -> localClusterState, + localClusterState::metadata, + List.of() + ); transportService.start(); transportService.acceptIncomingRequests(); @@ -384,7 +406,7 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(otherClusterState), + serializeClusterState(otherClusterState), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -401,6 +423,22 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { ); } + private static BytesTransportRequest serializeClusterState(ClusterState clusterState) { + try ( + var bytesStream = new BytesStreamOutput(); + var compressedStream = new OutputStreamStreamOutput( + CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream)) + ) + ) { + compressedStream.setTransportVersion(TransportVersion.current()); + clusterState.writeTo(compressedStream); + compressedStream.flush(); + return new BytesTransportRequest(ReleasableBytesReference.wrap(bytesStream.bytes()), TransportVersion.current()); + } catch (Exception e) { + throw new AssertionError(e); + } + } + public void testJoinValidationRunsJoinValidators() { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); @@ -420,11 +458,12 @@ public void testJoinValidationRunsJoinValidators() { new JoinValidationService( Settings.EMPTY, transportService, + writableRegistry(), () -> localClusterState, localClusterState::metadata, List.of((node, state) -> { assertSame(node, localNode); - assertSame(state, stateForValidation); + assertEquals(state.stateUUID(), stateForValidation.stateUUID()); throw new IllegalStateException("simulated validation failure"); }) 
); // registers request handler @@ -435,7 +474,7 @@ public void testJoinValidationRunsJoinValidators() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(stateForValidation), + serializeClusterState(stateForValidation), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -467,9 +506,16 @@ protected void onSendRequest(long requestId, String action, TransportRequest req null, Collections.emptySet() ); - final var joinValidationService = new JoinValidationService(Settings.EMPTY, masterTransportService, () -> null, () -> { - throw new AssertionError("should not be called"); - }, List.of()); + final var joinValidationService = new JoinValidationService( + Settings.EMPTY, + masterTransportService, + writableRegistry(), + () -> null, + () -> { + throw new AssertionError("should not be called"); + }, + List.of() + ); masterTransportService.start(); masterTransportService.acceptIncomingRequests(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index fd376fcd07688..18c591166e720 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -39,7 +39,6 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; @@ -50,7 +49,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -586,13 +584,8 @@ public void sendRequest( // poor mans request cloning... BytesStreamOutput bStream = new BytesStreamOutput(); request.writeTo(bStream); - final TransportRequest clonedRequest; - if (request instanceof BytesTransportRequest) { - clonedRequest = copyRawBytesForBwC(bStream); - } else { - RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); - clonedRequest = reg.newRequest(bStream.bytes().streamInput()); - } + RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); + final TransportRequest clonedRequest = reg.newRequest(bStream.bytes().streamInput()); assert clonedRequest.getClass().equals(MasterNodeRequestHelper.unwrapTermOverride(request).getClass()) : clonedRequest + " vs " + request; @@ -640,15 +633,6 @@ protected void doRun() throws IOException { } } - // Some request handlers read back a BytesTransportRequest - // into a different class that cannot be re-serialized (i.e. JOIN_VALIDATE_ACTION_NAME), - // in those cases we just copy the raw bytes back to a BytesTransportRequest. 
- // This is only needed for the BwC for JOIN_VALIDATE_ACTION_NAME and can be removed in the next major - @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) - private static TransportRequest copyRawBytesForBwC(BytesStreamOutput bStream) throws IOException { - return new BytesTransportRequest(bStream.bytes().streamInput()); - } - @Override public void clearCallback() { synchronized (this) { From 0e945127edcf45110358be4ec0cfc1462486a553 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 21 Nov 2024 02:54:19 -0800 Subject: [PATCH 128/386] Policy manager for entitlements (#116695) --- .../bootstrap/EntitlementBootstrap.java | 25 +++- .../EntitlementInitialization.java | 82 ++++++++++++- .../api/ElasticsearchEntitlementChecker.java | 74 +---------- .../runtime/policy/FlagEntitlementType.java | 14 +++ .../runtime/policy/PolicyManager.java | 116 ++++++++++++++++++ .../bootstrap/Elasticsearch.java | 21 +++- .../elasticsearch/plugins/PluginsUtils.java | 4 +- 7 files changed, 260 insertions(+), 76 deletions(-) create mode 100644 libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java create mode 100644 libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 7f68457baea9e..01b8f4d574f90 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -15,6 +15,7 @@ import com.sun.tools.attach.VirtualMachine; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -22,15 +23,33 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collection; +import java.util.Objects; +import java.util.function.Function; public class EntitlementBootstrap { + public record BootstrapArgs(Collection> pluginData, Function, String> pluginResolver) {} + + private static BootstrapArgs bootstrapArgs; + + public static BootstrapArgs bootstrapArgs() { + return bootstrapArgs; + } + /** - * Activates entitlement checking. Once this method returns, calls to forbidden methods - * will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * Activates entitlement checking. Once this method returns, calls to methods protected by Entitlements from classes without a valid + * policy will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * @param pluginData a collection of (plugin path, boolean), that holds the paths of all the installed Elasticsearch modules and + * plugins, and whether they are Java modular or not. + * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). 
*/ - public static void bootstrap() { + public static void bootstrap(Collection> pluginData, Function, String> pluginResolver) { logger.debug("Loading entitlement agent"); + if (EntitlementBootstrap.bootstrapArgs != null) { + throw new IllegalStateException("plugin data is already set"); + } + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(Objects.requireNonNull(pluginData), Objects.requireNonNull(pluginResolver)); exportInitializationToAgent(); loadAgent(findAgentJar()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 30c6045d1ccef..6d31abe4cf054 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,19 +9,36 @@ package org.elasticsearch.entitlement.initialization; +import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.provider.ProviderLocator; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.Policy; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; +import org.elasticsearch.entitlement.runtime.policy.PolicyParser; +import org.elasticsearch.entitlement.runtime.policy.Scope; +import java.io.IOException; import java.lang.instrument.Instrumentation; +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; + /** * Called by the agent during {@code agentmain} to configure the entitlement system, * instantiate and configure an {@link EntitlementChecker}, @@ -30,6 +47,9 @@ * to begin injecting our instrumentation. 
*/ public class EntitlementInitialization { + + private static final String POLICY_FILE_NAME = "entitlement-policy.yaml"; + private static ElasticsearchEntitlementChecker manager; // Note: referenced by bridge reflectively @@ -39,7 +59,7 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(); + manager = new ElasticsearchEntitlementChecker(createPolicyManager()); Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( "org.elasticsearch.entitlement.bridge.EntitlementChecker" @@ -61,6 +81,66 @@ private static Class internalNameToClass(String internalName) { } } + private static PolicyManager createPolicyManager() throws IOException { + Map pluginPolicies = createPluginPolicies(EntitlementBootstrap.bootstrapArgs().pluginData()); + + // TODO: What should the name be? + // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it + var serverPolicy = new Policy("server", List.of()); + return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); + } + + private static Map createPluginPolicies(Collection> pluginData) throws IOException { + Map pluginPolicies = new HashMap<>(pluginData.size()); + for (Tuple entry : pluginData) { + Path pluginRoot = entry.v1(); + boolean isModular = entry.v2(); + + String pluginName = pluginRoot.getFileName().toString(); + final Policy policy = loadPluginPolicy(pluginRoot, isModular, pluginName); + + pluginPolicies.put(pluginName, policy); + } + return pluginPolicies; + } + + private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName) throws IOException { + Path policyFile = pluginRoot.resolve(POLICY_FILE_NAME); + + final Set moduleNames = getModuleNames(pluginRoot, isModular); + final Policy policy = parsePolicyIfExists(pluginName, policyFile); + + // TODO: should this check actually be part of the parser? 
+ for (Scope scope : policy.scopes) { + if (moduleNames.contains(scope.name) == false) { + throw new IllegalStateException("policy [" + policyFile + "] contains invalid module [" + scope.name + "]"); + } + } + return policy; + } + + private static Policy parsePolicyIfExists(String pluginName, Path policyFile) throws IOException { + if (Files.exists(policyFile)) { + return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName).parsePolicy(); + } + return new Policy(pluginName, List.of()); + } + + private static Set getModuleNames(Path pluginRoot, boolean isModular) { + if (isModular) { + ModuleFinder moduleFinder = ModuleFinder.of(pluginRoot); + Set moduleReferences = moduleFinder.findAll(); + + return moduleReferences.stream().map(mr -> mr.descriptor().name()).collect(Collectors.toUnmodifiableSet()); + } + // When isModular == false we use the same "ALL-UNNAMED" constant as the JDK to indicate (any) unnamed module for this plugin + return Set.of(ALL_UNNAMED); + } + + private static String internalName(Class c) { + return c.getName().replace('.', '/'); + } + private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>( "entitlement", InstrumentationService.class, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 6324dbf73ee05..790416ca5659a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,14 +10,8 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; - -import java.lang.module.ModuleFinder; -import java.lang.module.ModuleReference; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; +import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; /** * Implementation of the {@link EntitlementChecker} interface, providing additional @@ -25,70 +19,14 @@ * The trampoline module loads this object via SPI. 
*/ public class ElasticsearchEntitlementChecker implements EntitlementChecker { - private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); - - private static final Set systemModules = findSystemModules(); - - private static Set findSystemModules() { - var systemModulesDescriptors = ModuleFinder.ofSystem() - .findAll() - .stream() - .map(ModuleReference::descriptor) - .collect(Collectors.toUnmodifiableSet()); + private final PolicyManager policyManager; - return ModuleLayer.boot() - .modules() - .stream() - .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) - .collect(Collectors.toUnmodifiableSet()); + public ElasticsearchEntitlementChecker(PolicyManager policyManager) { + this.policyManager = policyManager; } @Override public void check$java_lang_System$exit(Class callerClass, int status) { - var requestingModule = requestingModule(callerClass); - if (isTriviallyAllowed(requestingModule)) { - return; - } - - // TODO: this will be checked using policies - if (requestingModule.isNamed() && requestingModule.getName().equals("org.elasticsearch.server")) { - logger.debug("Allowed: caller in {} is entitled to exit the JVM", requestingModule.getName()); - return; - } - - // Hard-forbidden until we develop the permission granting scheme - throw new NotEntitledException("Missing entitlement for " + requestingModule); - } - - private static Module requestingModule(Class callerClass) { - if (callerClass != null) { - Module callerModule = callerClass.getModule(); - if (systemModules.contains(callerModule) == false) { - // fast path - return callerModule; - } - } - int framesToSkip = 1 // getCallingClass (this method) - + 1 // the checkXxx method - + 1 // the runtime config method - + 1 // the instrumented method - ; - Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) - .walk( - s -> s.skip(framesToSkip) - .map(f -> f.getDeclaringClass().getModule()) - .filter(m -> systemModules.contains(m) == false) - .findFirst() - ); - return module.orElse(null); - } - - private static boolean isTriviallyAllowed(Module requestingModule) { - if (requestingModule == null) { - logger.debug("Trivially allowed: entire call stack is in composed of classes in system modules"); - return true; - } - logger.trace("Not trivially allowed"); - return false; + policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java new file mode 100644 index 0000000000000..60490baf41a10 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +public enum FlagEntitlementType { + SYSTEM_EXIT; +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java new file mode 100644 index 0000000000000..c06dc09758de5 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.Strings; +import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class PolicyManager { + private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + + protected final Policy serverPolicy; + protected final Map pluginPolicies; + private final Function, String> pluginResolver; + + public static final String ALL_UNNAMED = "ALL-UNNAMED"; + + private static final Set systemModules = findSystemModules(); + + private static Set findSystemModules() { + var systemModulesDescriptors = ModuleFinder.ofSystem() + .findAll() + .stream() + .map(ModuleReference::descriptor) + .collect(Collectors.toUnmodifiableSet()); + + return ModuleLayer.boot() + .modules() + .stream() + .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) + .collect(Collectors.toUnmodifiableSet()); + } + + public PolicyManager(Policy defaultPolicy, Map pluginPolicies, Function, String> pluginResolver) { + this.serverPolicy = Objects.requireNonNull(defaultPolicy); + this.pluginPolicies = Collections.unmodifiableMap(Objects.requireNonNull(pluginPolicies)); + this.pluginResolver = pluginResolver; + } + + public void checkFlagEntitlement(Class callerClass, FlagEntitlementType type) { + var requestingModule = requestingModule(callerClass); + if (isTriviallyAllowed(requestingModule)) { + return; + } + + // TODO: real policy check. For now, we only allow our hardcoded System.exit policy for server. 
+ // TODO: this will be checked using policies + if (requestingModule.isNamed() + && requestingModule.getName().equals("org.elasticsearch.server") + && type == FlagEntitlementType.SYSTEM_EXIT) { + logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type); + return; + } + + // TODO: plugins policy check using pluginResolver and pluginPolicies + throw new NotEntitledException( + Strings.format("Missing entitlement [%s] for caller [%s] in module [%s]", type, callerClass, requestingModule.getName()) + ); + } + + private static Module requestingModule(Class callerClass) { + if (callerClass != null) { + Module callerModule = callerClass.getModule(); + if (systemModules.contains(callerModule) == false) { + // fast path + return callerModule; + } + } + int framesToSkip = 1 // getCallingClass (this method) + + 1 // the checkXxx method + + 1 // the runtime config method + + 1 // the instrumented method + ; + Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) + .walk( + s -> s.skip(framesToSkip) + .map(f -> f.getDeclaringClass().getModule()) + .filter(m -> systemModules.contains(m) == false) + .findFirst() + ); + return module.orElse(null); + } + + private static boolean isTriviallyAllowed(Module requestingModule) { + if (requestingModule == null) { + logger.debug("Trivially allowed: entire call stack is in composed of classes in system modules"); + return true; + } + logger.trace("Not trivially allowed"); + return false; + } + + @Override + public String toString() { + return "PolicyManager{" + "serverPolicy=" + serverPolicy + ", pluginPolicies=" + pluginPolicies + '}'; + } +} diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 77875e65ab9b8..95e5b00a2805f 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; @@ -41,7 +42,9 @@ import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginBundle; import org.elasticsearch.plugins.PluginsLoader; +import org.elasticsearch.plugins.PluginsUtils; import java.io.IOException; import java.io.InputStream; @@ -51,8 +54,10 @@ import java.nio.file.Path; import java.security.Permission; import java.security.Security; +import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -201,11 +206,23 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { ); // load the plugin Java modules and layers now for use in entitlements - bootstrap.setPluginsLoader(new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile())); + var pluginsLoader = new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); + bootstrap.setPluginsLoader(pluginsLoader); if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { logger.info("Bootstrapping Entitlements"); - 
EntitlementBootstrap.bootstrap(); + + List> pluginData = new ArrayList<>(); + Set moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile()); + for (PluginBundle moduleBundle : moduleBundles) { + pluginData.add(Tuple.tuple(moduleBundle.getDir(), moduleBundle.pluginDescriptor().isModular())); + } + Set pluginBundles = PluginsUtils.getPluginBundles(nodeEnv.pluginsFile()); + for (PluginBundle pluginBundle : pluginBundles) { + pluginData.add(Tuple.tuple(pluginBundle.getDir(), pluginBundle.pluginDescriptor().isModular())); + } + // TODO: add a functor to map module to plugin name + EntitlementBootstrap.bootstrap(pluginData, callerClass -> null); } else { // install SM after natives, shutdown hooks, etc. logger.info("Bootstrapping java SecurityManager"); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java index 44fb531f8610e..155cff57a0ebf 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java @@ -210,12 +210,12 @@ public static void checkForFailedPluginRemovals(final Path pluginsDirectory) thr } /** Get bundles for plugins installed in the given modules directory. */ - static Set getModuleBundles(Path modulesDirectory) throws IOException { + public static Set getModuleBundles(Path modulesDirectory) throws IOException { return findBundles(modulesDirectory, "module"); } /** Get bundles for plugins installed in the given plugins directory. */ - static Set getPluginBundles(final Path pluginsDirectory) throws IOException { + public static Set getPluginBundles(final Path pluginsDirectory) throws IOException { return findBundles(pluginsDirectory, "plugin"); } From 7c18f1108d7b16b96c5b06854c1123476401ce02 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Thu, 21 Nov 2024 13:25:43 +0200 Subject: [PATCH 129/386] Adding missing json spec for allow_partial_search_results in point-in-time (#117121) --- .../main/resources/rest-api-spec/api/open_point_in_time.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json index bce8dfd794dca..6f3d09c15c081 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json @@ -55,6 +55,10 @@ "type": "string", "description": "Specific the time to live for the point in time", "required": true + }, + "allow_partial_search_results": { + "type": "boolean", + "description": "Specify whether to tolerate shards missing when creating the point-in-time, or otherwise throw an exception. 
(default: false)" } }, "body":{ From 49a1bb83d1815a0ae90e6e9b3a606f5e1e4fa748 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 21 Nov 2024 13:36:16 +0200 Subject: [PATCH 130/386] Scale down randomized esql tests with logsdb (#117228) --- .../qa/StandardVersusLogsIndexModeChallengeRestIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 8d7a813b206d8..8930ff23fb3b0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -271,7 +271,7 @@ public void testDateHistogramAggregation() throws IOException { } public void testEsqlSource() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -287,7 +287,7 @@ public void testEsqlSource() throws IOException { } public void testEsqlTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -302,7 +302,7 @@ public void testEsqlTermsAggregation() throws IOException { } public void testEsqlTermsAggregationByMethod() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); From 75f423227466ea73e8ffaa71e0beeb08b2ffe2ed Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 21 Nov 2024 12:44:17 +0100 Subject: [PATCH 131/386] Check the real memory circuit breaker when building internal aggregations (#117019) checks periodically the real memory circuit breaker when allocating objects. 
--- .../adjacency/AdjacencyMatrixAggregator.java | 24 +++---- .../timeseries/TimeSeriesAggregator.java | 7 +- .../search/aggregations/AggregatorBase.java | 29 ++++++++ .../aggregations/NonCollectingAggregator.java | 6 +- .../bucket/BucketsAggregator.java | 71 ++++++++----------- .../countedterms/CountedTermsAggregator.java | 10 +-- .../bucket/geogrid/GeoGridAggregator.java | 10 +-- .../bucket/prefix/IpPrefixAggregator.java | 21 +++--- .../GlobalOrdinalsStringTermsAggregator.java | 20 +++--- .../bucket/terms/LongRareTermsAggregator.java | 18 ++--- .../terms/MapStringTermsAggregator.java | 11 +-- .../bucket/terms/NumericTermsAggregator.java | 10 +-- .../terms/StringRareTermsAggregator.java | 18 ++--- .../metrics/MetricsAggregator.java | 6 +- .../multiterms/MultiTermsAggregator.java | 10 +-- .../CategorizeTextAggregator.java | 13 ++-- 16 files changed, 138 insertions(+), 146 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 29e8aec00a02d..203105edc5a24 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -188,17 +188,16 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } } try (LongArray bucketOrdsToBuild = bigArrays().newLongArray(totalBucketsToBuild)) { - int builtBucketIndex = 0; + int[] builtBucketIndex = new int[] { 0 }; for (int ord = 0; ord < maxOrd; ord++) { if (bucketDocCount(ord) > 0) { - bucketOrdsToBuild.set(builtBucketIndex++, ord); + bucketOrdsToBuild.set(builtBucketIndex[0]++, ord); } } - assert builtBucketIndex == totalBucketsToBuild; - builtBucketIndex = 0; + assert builtBucketIndex[0] == totalBucketsToBuild; + builtBucketIndex[0] = 0; var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < results.length; owningBucketOrdIdx++) { + InternalAggregation[] aggregations = buildAggregations(Math.toIntExact(owningBucketOrds.size()), owningBucketOrdIdx -> { List buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), i); @@ -207,10 +206,11 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw // a date-histogram where we will look for transactions over time and can expect many // empty buckets. 
if (docCount > 0) { + checkRealMemoryCBForInternalBucket(); InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( keys[i], docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } @@ -226,17 +226,17 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( intersectKey, docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } pos++; } } - results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); - } - assert builtBucketIndex == totalBucketsToBuild; - return results; + return new InternalAdjacencyMatrix(name, buckets, metadata()); + }); + assert builtBucketIndex[0] == totalBucketsToBuild; + return aggregations; } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index 1263d4282a18a..369ae4590fe97 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -79,6 +79,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw while (ordsEnum.next()) { long docCount = bucketDocCount(ordsEnum.ord()); ordsEnum.readValue(spare); + checkRealMemoryCBForInternalBucket(); InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. docCount, @@ -101,11 +102,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(allBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(allBucketsPerOrd.get(ordIdx)); - } - return result; + return buildAggregations(Math.toIntExact(allBucketsPerOrd.size()), ordIdx -> buildResult(allBucketsPerOrd.get(ordIdx))); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index 1ea7769b33384..11444edca080d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -13,6 +13,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.CheckedIntFunction; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.Maps; @@ -48,6 +50,8 @@ public abstract class AggregatorBase extends Aggregator { private Map subAggregatorbyName; private long requestBytesUsed; + private final CircuitBreaker breaker; + private int callCount; /** * Constructs a new Aggregator. 
@@ -72,6 +76,7 @@ protected AggregatorBase( this.metadata = metadata; this.parent = parent; this.context = context; + this.breaker = context.breaker(); assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(this, subAggregatorCardinality); context.addReleasable(this); @@ -327,6 +332,30 @@ protected final InternalAggregations buildEmptySubAggregations() { return InternalAggregations.from(aggs); } + /** + * Builds the aggregations array with the provided size and populates it using the provided function. + */ + protected final InternalAggregation[] buildAggregations(int size, CheckedIntFunction aggFunction) + throws IOException { + final InternalAggregation[] results = new InternalAggregation[size]; + for (int i = 0; i < results.length; i++) { + checkRealMemoryCB("internal_aggregation"); + results[i] = aggFunction.apply(i); + } + return results; + } + + /** + * This method calls the circuit breaker from time to time in order to give it a chance to check available + * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. + * To achieve that, we are passing 0 as the estimated bytes every 1024 calls + */ + protected final void checkRealMemoryCB(String label) { + if ((++callCount & 0x3FF) == 0) { + breaker.addEstimateBytesAndMaybeBreak(0, label); + } + } + @Override public String toString() { return name; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java index 4da2d10cfc0c2..a32211fd4d8fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java @@ -41,10 +41,6 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildEmptyAggregation(); - } - return results; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildEmptyAggregation()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 252eb0877d024..ea667b821a7dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; @@ -42,10 +41,9 @@ import java.util.function.ToLongFunction; public abstract class BucketsAggregator extends AggregatorBase { - private final CircuitBreaker breaker; + private LongArray docCounts; protected final DocCountProvider docCountProvider; - private int callCount; @SuppressWarnings("this-escape") public BucketsAggregator( 
@@ -57,7 +55,6 @@ public BucketsAggregator( Map metadata ) throws IOException { super(name, factories, aggCtx, parent, bucketCardinality, metadata); - breaker = aggCtx.breaker(); docCounts = bigArrays().newLongArray(1, true); docCountProvider = new DocCountProvider(); } @@ -83,7 +80,7 @@ public final void collectBucket(LeafBucketCollector subCollector, int doc, long grow(bucketOrd + 1); int docCount = docCountProvider.getDocCount(doc); if (docCounts.increment(bucketOrd, docCount) == docCount) { - updateCircuitBreaker("allocated_buckets"); + checkRealMemoryCB("allocated_buckets"); } subCollector.collect(doc, bucketOrd); } @@ -176,7 +173,7 @@ protected final IntFunction buildSubAggsForBuckets(LongArr prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { - updateCircuitBreaker("building_sub_aggregation"); + checkRealMemoryCB("building_sub_aggregation"); aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect); } return subAggsForBucketFunction(aggregations); @@ -247,31 +244,30 @@ protected final InternalAggregation[] buildAggregationsForFixedBucketCount( Function, InternalAggregation> resultBuilder ) throws IOException { try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(owningBucketOrds.size() * bucketsPerOwningBucketOrd)) { - int bucketOrdIdx = 0; + final int[] bucketOrdIdx = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { long ord = owningBucketOrds.get(i) * bucketsPerOwningBucketOrd; for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - bucketOrdsToCollect.set(bucketOrdIdx++, ord++); + bucketOrdsToCollect.set(bucketOrdIdx[0]++, ord++); } } - bucketOrdIdx = 0; + bucketOrdIdx[0] = 0; var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int owningOrdIdx = 0; owningOrdIdx < results.length; owningOrdIdx++) { + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + checkRealMemoryCBForInternalBucket(); buckets.add( bucketBuilder.build( offsetInOwningOrd, - bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx)), - subAggregationResults.apply(bucketOrdIdx++) + bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx[0])), + subAggregationResults.apply(bucketOrdIdx[0]++) ) ); } - results[owningOrdIdx] = resultBuilder.apply(buckets); - } - return results; + return resultBuilder.apply(buckets); + }); } } @@ -295,11 +291,10 @@ protected final InternalAggregation[] buildAggregationsForSingleBucket( * here but we don't because single bucket aggs never have. 
*/ var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)); - } - return results; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)) + ); } @FunctionalInterface @@ -335,37 +330,36 @@ protected final InternalAggregation[] buildAggregationsForVariableBuckets( ); } try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { - int b = 0; + final int[] b = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); while (ordsEnum.next()) { - bucketOrdsToCollect.set(b++, ordsEnum.ord()); + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); } } var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - b = 0; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { final long owningBucketOrd = owningBucketOrds.get(ordIdx); List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); while (ordsEnum.next()) { - if (bucketOrdsToCollect.get(b) != ordsEnum.ord()) { + if (bucketOrdsToCollect.get(b[0]) != ordsEnum.ord()) { // If we hit this, something has gone horribly wrong and we need to investigate throw AggregationErrors.iterationOrderChangedWithoutMutating( bucketOrds.toString(), ordsEnum.ord(), - bucketOrdsToCollect.get(b) + bucketOrdsToCollect.get(b[0]) ); } + checkRealMemoryCBForInternalBucket(); buckets.add( - bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++)) + bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b[0]++)) ); } - results[ordIdx] = resultBuilder.build(owningBucketOrd, buckets); - } - return results; + return resultBuilder.build(owningBucketOrd, buckets); + }); } } } @@ -425,14 +419,9 @@ protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException docCountProvider.setLeafReaderContext(ctx); } - /** - * This method calls the circuit breaker from time to time in order to give it a chance to check available - * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. - * To achieve that, we are passing 0 as the estimated bytes every 1024 calls - */ - private void updateCircuitBreaker(String label) { - if ((++callCount & 0x3FF) == 0) { - breaker.addEstimateBytesAndMaybeBreak(0, label); - } + /** This method should be called whenever a new bucket object is created. It will check the real memory + * circuit breaker in a sampling fashion. 
See {@link #checkRealMemoryCB(String)} */ + protected final void checkRealMemoryCBForInternalBucket() { + checkRealMemoryCB("internal_bucket"); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index 05fce2cff64d5..344b90b06c4f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -140,6 +140,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(ordsEnum.ord()); otherDocCounts.increment(ordIdx, docCount); if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } ordsEnum.readValue(spare.getTermBytes()); @@ -158,8 +159,8 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { final BucketOrder reduceOrder; if (isKeyOrder(order) == false) { reduceOrder = InternalOrder.key(true); @@ -167,7 +168,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } else { reduceOrder = order; } - result[ordIdx] = new StringTerms( + return new StringTerms( name, reduceOrder, order, @@ -181,8 +182,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw Arrays.asList(topBucketsPerOrd.get(ordIdx)), null ); - } - return result; + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index 0e63e26e77a55..1d3614af08768 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -144,6 +144,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); while (ordsEnum.next()) { if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = newEmptyBucket(); } @@ -162,11 +163,10 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } } buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()); - } - return results; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java index e8ba0393208a0..e3192e9b2fa16 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java @@ -172,32 +172,32 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { - int b = 0; + int[] b = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); while (ordsEnum.next()) { - bucketOrdsToCollect.set(b++, ordsEnum.ord()); + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); } } var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - b = 0; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); while (ordsEnum.next()) { long ordinal = ordsEnum.ord(); - if (bucketOrdsToCollect.get(b) != ordinal) { + if (bucketOrdsToCollect.get(b[0]) != ordinal) { throw AggregationErrors.iterationOrderChangedWithoutMutating( bucketOrds.toString(), ordinal, - bucketOrdsToCollect.get(b) + bucketOrdsToCollect.get(b[0]) ); } BytesRef ipAddress = new BytesRef(); ordsEnum.readValue(ipAddress); long docCount = bucketDocCount(ordinal); + checkRealMemoryCBForInternalBucket(); buckets.add( new InternalIpPrefix.Bucket( config.format(), @@ -207,16 +207,15 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw ipPrefix.prefixLength, ipPrefix.appendPrefixLength, docCount, - subAggregationResults.apply(b++) + subAggregationResults.apply(b[0]++) ) ); // NOTE: the aggregator is expected to return sorted results CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); } - results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); - } - return results; + return new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); + }); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index d04d7528ea938..db9da6ed67207 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -700,11 +700,10 @@ abstract class ResultStrategy< private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { if (valueCount == 0) { // no context in this reader - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildNoValuesResult(owningBucketOrds.get(ordIdx)); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + 
Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildNoValuesResult(owningBucketOrds.get(ordIdx)) + ); } try ( LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true); @@ -731,6 +730,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep otherDocCount.increment(finalOrdIdx, docCount); if (docCount >= bucketCountThresholds.getShardMinDocCount()) { if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = buildEmptyTemporaryBucket(); } updater.updateBucket(spare, globalOrd, bucketOrd, docCount); @@ -742,6 +742,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep // Get the top buckets topBucketsPreOrd.set(ordIdx, buildBuckets((int) ordered.size())); for (int i = (int) ordered.size() - 1; i >= 0; --i) { + checkRealMemoryCBForInternalBucket(); B bucket = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); topBucketsPreOrd.get(ordIdx)[i] = bucket; otherDocCount.increment(ordIdx, -bucket.getDocCount()); @@ -751,11 +752,10 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep buildSubAggs(topBucketsPreOrd); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPreOrd.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java index 877bd2cac4b05..45ea1245ec38d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java @@ -142,6 +142,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(collectedBuckets.ord()); // if the key is below threshold, reinsert into the new ords if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); @@ -173,21 +174,12 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw * Now build the results! 
*/ buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return LongRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { LongRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); Arrays.sort(buckets, ORDER.comparator()); - result[ordIdx] = new LongRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(buckets), - maxDocCount, - filters.get(ordIdx) - ); - } - return result; + return new LongRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index c02ed5509e6ae..6ae47d5975479 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -304,6 +304,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } updateBucket(spare, ordsEnum, docCount); @@ -320,11 +321,11 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro } buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + + return MapStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index e10f0b8944027..ce89b95b76a05 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -185,6 +185,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } updateBucket(spare, ordsEnum, docCount); @@ -203,11 +204,10 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + return NumericTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java index 7200c33c71f70..8a2c9d52f4212 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java @@ -145,6 +145,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(collectedBuckets.ord()); // if the key is below threshold, reinsert into the new ords if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); StringRareTerms.Bucket bucket = new StringRareTerms.Bucket( BytesRef.deepCopyOf(scratch), docCount, @@ -181,21 +182,12 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw * Now build the results! */ buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return StringRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { StringRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); Arrays.sort(buckets, ORDER.comparator()); - result[ordIdx] = new StringRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(buckets), - maxDocCount, - filters.get(ordIdx) - ); - } - return result; + return new StringRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java index 0d767e356108a..cf65f1ff7c835 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java @@ -38,10 +38,6 @@ protected MetricsAggregator(String name, AggregationContext context, Aggregator @Override public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildAggregation(owningBucketOrds.get(ordIdx)); - } - return results; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildAggregation(owningBucketOrds.get(ordIdx))); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 0c6e94a15ec36..1691aedf543f4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -264,6 +264,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, 
keyConverters); spareKey = new BytesRef(); } @@ -287,11 +288,10 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index 5b1ed7c954fe9..e13b1e0033191 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -121,21 +121,22 @@ public InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws I continue; } int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); + checkRealMemoryCBForInternalBucket(); topBucketsPerOrd.set(ordIdx, categorizer.toOrderedBuckets(size)); } buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(ordsToCollect.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = new InternalCategorizationAggregation( + + return buildAggregations( + Math.toIntExact(ordsToCollect.size()), + ordIdx -> new InternalCategorizationAggregation( name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), similarityThreshold, metadata(), Arrays.asList(topBucketsPerOrd.get(ordIdx)) - ); - } - return results; + ) + ); } } From 21f206b70c0b140f1737ca3fa0260399796adfa4 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 21 Nov 2024 13:36:20 +0100 Subject: [PATCH 132/386] ESQL: fix the column position in errors (#117153) This fixes the off-by-one error of the column position in some of the error messages. 
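In outline, the change normalizes the parser's 0-based character position to a 1-based column once, at construction time, instead of at each read site. A minimal sketch of that convention follows; the names are illustrative stand-ins, not the actual Elasticsearch ParsingException:

    // Hedged sketch: ANTLR-style parsers report charPositionInLine 0-based,
    // while user-facing messages are 1-based. Converting exactly once in the
    // constructor keeps every later read consistent.
    class PositionedException extends RuntimeException {
        private final int line;
        private final int column;

        PositionedException(String message, int line, int charPositionInLine) {
            super(message);
            this.line = line;
            this.column = charPositionInLine + 1; // the single 0-based -> 1-based conversion
        }

        int getLineNumber() {
            return line;
        }

        int getColumnNumber() {
            return column; // already 1-based, so no second "+ 1" can sneak in elsewhere
        }
    }

With the offset applied up front, message construction and getColumnNumber() can no longer disagree, which is what the adjusted expectations in the test diffs below reflect.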
--- docs/changelog/117153.yaml | 5 ++ .../xpack/esql/analysis/Verifier.java | 2 +- .../xpack/esql/parser/ParsingException.java | 4 +- .../xpack/esql/analysis/VerifierTests.java | 20 ++++---- .../optimizer/LogicalPlanOptimizerTests.java | 4 +- .../xpack/esql/parser/ExpressionTests.java | 18 +++---- .../esql/parser/StatementParserTests.java | 50 +++++++++---------- 7 files changed, 54 insertions(+), 49 deletions(-) create mode 100644 docs/changelog/117153.yaml diff --git a/docs/changelog/117153.yaml b/docs/changelog/117153.yaml new file mode 100644 index 0000000000000..f7640c0a7ed6a --- /dev/null +++ b/docs/changelog/117153.yaml @@ -0,0 +1,5 @@ +pr: 117153 +summary: "ESQL: fix the column position in errors" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 54661fa42ccbe..694328e57b5ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -511,7 +511,7 @@ private static void checkRow(LogicalPlan p, Set failures) { if (p instanceof Row row) { row.fields().forEach(a -> { if (DataType.isRepresentable(a.dataType()) == false) { - failures.add(fail(a, "cannot use [{}] directly in a row assignment", a.child().sourceText())); + failures.add(fail(a.child(), "cannot use [{}] directly in a row assignment", a.child().sourceText())); } }); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 0705ae7f778cd..484a655fc2988 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -18,7 +18,7 @@ public class ParsingException extends EsqlClientException { public ParsingException(String message, Exception cause, int line, int charPositionInLine) { super(message, cause); this.line = line; - this.charPositionInLine = charPositionInLine; + this.charPositionInLine = charPositionInLine + 1; } ParsingException(String message, Object... 
args) { @@ -42,7 +42,7 @@ public int getLineNumber() { } public int getColumnNumber() { - return charPositionInLine + 1; + return charPositionInLine; } public String getErrorMessage() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 06d8cb244ef19..8f8d95a33429c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -771,40 +771,40 @@ public void testWrongInputParam() { public void testPeriodAndDurationInRowAssignment() { for (var unit : TIME_DURATIONS) { - assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::time_duration] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::time_duration] directly in a row assignment", error("row a = 1 " + unit + "::time_duration") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", error("row a = \"1 " + unit + "\"::time_duration") ); assertEquals( - "1:5: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", error("row a = to_timeduration(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", error("row a = to_timeduration(\"1 " + unit + "\")") ); } for (var unit : DATE_PERIODS) { - assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::date_period] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::date_period] directly in a row assignment", error("row a = 1 " + unit + "::date_period") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", error("row a = \"1 " + unit + "\"::date_period") ); assertEquals( - "1:5: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", error("row a = to_dateperiod(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", error("row a = to_dateperiod(\"1 " + unit + "\")") ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 2a55379db69d1..96951ee15d48b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -2563,7 +2563,7 @@ public void testSimplifyRLikeMatchAll() { public void testRLikeWrongPattern() { String query = "from test | where first_name rlike \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[invalid range: from (95) cannot be > to (93)]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -2571,7 +2571,7 @@ public void testRLikeWrongPattern() { public void testLikeWrongPattern() { String query = "from test | where first_name like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 0177747d27243..710637c05a900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -134,7 +134,7 @@ public void testStringLiteralsExceptions() { ); var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining()); - assertParsingException(() -> parse("row foo == " + number), "line 1:13: Number [" + number + "] is too large"); + assertParsingException(() -> parse("row foo == " + number), "line 1:12: Number [" + number + "] is too large"); } public void testBooleanLiteralsCondition() { @@ -442,20 +442,20 @@ public void testOverflowingValueForDuration() { for (String unit : List.of("milliseconds", "seconds", "minutes", "hours")) { assertParsingException( () -> parse("row x = 9223372036854775808 " + unit), // unsigned_long (Long.MAX_VALUE + 1) - "line 1:10: Number [9223372036854775808] outside of [" + unit + "] range" + "line 1:9: Number [9223372036854775808] outside of [" + unit + "] range" ); assertParsingException( () -> parse("row x = 18446744073709551616 " + unit), // double (UNSIGNED_LONG_MAX + 1) - "line 1:10: Number [18446744073709551616] outside of [" + unit + "] range" + "line 1:9: Number [18446744073709551616] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 153722867280912931 minutes"), // Long.MAX_VALUE / 60 + 1 - "line 1:10: Number [153722867280912931] outside of [minutes] range" + "line 1:9: Number [153722867280912931] outside of [minutes] range" ); assertParsingException( () -> parse("row x = 2562047788015216 hours"), // Long.MAX_VALUE / 3600 + 1 - "line 1:10: Number [2562047788015216] outside of [hours] range" + "line 1:9: Number [2562047788015216] outside of [hours] range" ); } @@ -463,12 +463,12 @@ public void testOverflowingValueForPeriod() { for (String unit : List.of("days", "weeks", "months", "years")) { assertParsingException( () -> parse("row x = 2147483648 " + unit), // long (Integer.MAX_VALUE + 1) - "line 1:10: Number [2147483648] outside 
of [" + unit + "] range" + "line 1:9: Number [2147483648] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 306783379 weeks"), // Integer.MAX_VALUE / 7 + 1 - "line 1:10: Number [306783379] outside of [weeks] range" + "line 1:9: Number [306783379] outside of [weeks] range" ); } @@ -544,7 +544,7 @@ public void testWildcardProjectAwayPatterns() { } public void testForbidWildcardProjectAway() { - assertParsingException(() -> dropExpression("foo, *"), "line 1:21: Removing all fields is not allowed [*]"); + assertParsingException(() -> dropExpression("foo, *"), "line 1:20: Removing all fields is not allowed [*]"); } public void testForbidMultipleIncludeStar() { @@ -608,7 +608,7 @@ public void testMultipleProjectPatterns() { } public void testForbidWildcardProjectRename() { - assertParsingException(() -> renameExpression("b* AS a*"), "line 1:18: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); + assertParsingException(() -> renameExpression("b* AS a*"), "line 1:17: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); } public void testSimplifyInWithSingleElementList() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 7b6c0048f2980..69c00eb395fdb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -525,10 +525,10 @@ private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { public void testInvalidCharacterInIndexPattern() { Map commands = new HashMap<>(); - commands.put("FROM {}", "line 1:7: "); + commands.put("FROM {}", "line 1:6: "); if (Build.current().isSnapshot()) { - commands.put("METRICS {}", "line 1:10: "); - commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:23: "); + commands.put("METRICS {}", "line 1:9: "); + commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:22: "); } String lineNumber; for (String command : commands.keySet()) { @@ -572,7 +572,7 @@ public void testInvalidCharacterInIndexPattern() { continue; } - lineNumber = command.contains("FROM") ? "line 1:21: " : "line 1:24: "; + lineNumber = command.contains("FROM") ? "line 1:20: " : "line 1:23: "; expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); @@ -585,7 +585,7 @@ public void testInvalidCharacterInIndexPattern() { if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; + lineNumber = command.contains("FROM") ? 
"line 1:9: " : "line 1:12: "; clustersAndIndices(command, "*", "-index#pattern"); clustersAndIndices(command, "index*", "-index#pattern"); clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>"); @@ -885,18 +885,18 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { public void testDeprecatedIsNullFunction() { expectError( "from test | eval x = is_null(f)", - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); expectError( "row x = is_null(f)", - "line 1:10: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:9: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); if (Build.current().isSnapshot()) { expectError( "from test | eval x = ?fn1(f)", List.of(paramAsIdentifier("fn1", "IS_NULL")), - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); } } @@ -911,23 +911,23 @@ public void testMetadataFieldOnOtherSources() { } public void testMetadataFieldMultipleDeclarations() { - expectError("from test metadata _index, _version, _index", "1:39: metadata field [_index] already declared [@1:20]"); + expectError("from test metadata _index, _version, _index", "1:38: metadata field [_index] already declared [@1:20]"); } public void testMetadataFieldUnsupportedPrimitiveType() { - expectError("from test metadata _tier", "line 1:21: unsupported metadata field [_tier]"); + expectError("from test metadata _tier", "line 1:20: unsupported metadata field [_tier]"); } public void testMetadataFieldUnsupportedCustomType() { - expectError("from test metadata _feature", "line 1:21: unsupported metadata field [_feature]"); + expectError("from test metadata _feature", "line 1:20: unsupported metadata field [_feature]"); } public void testMetadataFieldNotFoundNonExistent() { - expectError("from test metadata _doesnot_compute", "line 1:21: unsupported metadata field [_doesnot_compute]"); + expectError("from test metadata _doesnot_compute", "line 1:20: unsupported metadata field [_doesnot_compute]"); } public void testMetadataFieldNotFoundNormalField() { - expectError("from test metadata emp_no", "line 1:21: unsupported metadata field [emp_no]"); + expectError("from test metadata emp_no", "line 1:20: unsupported metadata field [emp_no]"); } public void testDissectPattern() { @@ -985,13 +985,13 @@ public void testGrokPattern() { expectError( "row a = \"foo bar\" | GROK a \"%{NUMBER:foo} %{WORD:foo}\"", - "line 1:22: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + "line 1:21: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + " the attribute [foo] is defined multiple times with different types" ); expectError( "row a = \"foo\" | GROK a \"(?P.+)\"", - "line 1:18: Invalid grok pattern [(?P.+)]: [undefined group option]" + "line 1:17: Invalid grok pattern [(?P.+)]: [undefined group option]" ); } @@ -1015,7 +1015,7 @@ public void testLikeRLike() { expectError( "from a | where foo like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\"", - "line 1:17: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "line 1:16: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special 
wildcard char]" ); } @@ -1076,7 +1076,7 @@ public void testEnrich() { ); expectError( "from a | enrich typo:countries on foo", - "line 1:18: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" + "line 1:17: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" ); } @@ -1261,8 +1261,8 @@ public void testInvalidPositionalParams() { expectError( "from test | where x < ?0 and y < ?2", List.of(paramAsConstant(null, 5)), - "line 1:24: No parameter is defined for position 0, did you mean position 1?; " - + "line 1:35: No parameter is defined for position 2, did you mean position 1?" + "line 1:23: No parameter is defined for position 0, did you mean position 1?; " + + "line 1:34: No parameter is defined for position 2, did you mean position 1?" ); expectError( @@ -2107,11 +2107,11 @@ public void testEnrichOnMatchField() { } public void testInlineConvertWithNonexistentType() { - expectError("ROW 1::doesnotexist", "line 1:9: Unknown data type named [doesnotexist]"); - expectError("ROW \"1\"::doesnotexist", "line 1:11: Unknown data type named [doesnotexist]"); - expectError("ROW false::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); - expectError("ROW abs(1)::doesnotexist", "line 1:14: Unknown data type named [doesnotexist]"); - expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW 1::doesnotexist", "line 1:8: Unknown data type named [doesnotexist]"); + expectError("ROW \"1\"::doesnotexist", "line 1:10: Unknown data type named [doesnotexist]"); + expectError("ROW false::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); + expectError("ROW abs(1)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW (1+2)::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); } public void testLookup() { @@ -2131,7 +2131,7 @@ public void testLookup() { } public void testInlineConvertUnsupportedType() { - expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); + expectError("ROW 3::BYTE", "line 1:5: Unsupported conversion to type [BYTE]"); } public void testMetricsWithoutStats() { From c190c5762bf659461dc8aa455b01fa1789001c39 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 21 Nov 2024 13:40:49 +0100 Subject: [PATCH 133/386] ESQL: Add tests for single count with filter (#117180) Test that filters work on single count(...) with no group.
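For context, with no BY clause the tested semantics reduce to a single global count of the rows matching the filter. A plain-Java reduction of the idea follows; the record type and sample data are hypothetical and not part of the code base:

    import java.util.List;

    // Hedged, illustrative-only reduction of
    // "STATS c1 = COUNT(emp_no) WHERE emp_no < 10042" with no grouping:
    // one global count over the rows that satisfy the filter.
    public class FilteredCountSketch {
        record Employee(int empNo) {}

        public static void main(String[] args) {
            List<Employee> employees = List.of(new Employee(10001), new Employee(10041), new Employee(10042));
            long c1 = employees.stream().filter(e -> e.empNo() < 10042).count();
            System.out.println(c1); // prints 2: only 10001 and 10041 match
        }
    }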
Related #115522 --- .../src/main/resources/stats.csv-spec | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 7a046786a4f19..cba5ace0dfe86 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2642,6 +2642,26 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |1 |5 |5 |21 ; +simpleCountOnFieldWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(emp_no) where emp_no < 10042 +; + +c1:long +41 +; + +simpleCountOnStarWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(*) where emp_no < 10042 +; + +c1:long +41 +; + commonFilterExtractionWithAliasing required_capability: per_agg_filtering from employees From fefa0f009fbcb786651dc2a12571f696f2f74363 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 21 Nov 2024 13:46:27 +0100 Subject: [PATCH 134/386] ESQL: add tests on use of grouping functions in agg filters (#117184) Add tests on use of grouping functions in agg filters: check that reusing the BUCKET expression from grouping is allowed, but no other variation. Related: #115521 --- .../src/main/resources/bucket.csv-spec | 16 ++++++++++++++++ .../xpack/esql/analysis/VerifierTests.java | 5 +++++ 2 files changed, 21 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index 3be3decaf351c..7bbf011176693 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -760,3 +760,19 @@ c:long |b:date 3 |2025-10-01T00:00:00.000Z 4 |2023-11-01T00:00:00.000Z ; + +bucketWithFilteredCountRefingBucket +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| STATS c = COUNT(*) WHERE b > "1953-01-01T00:00:00.000Z" AND emp_no > 10020 BY b = BUCKET(birth_date, 1 year) +| SORT c, b +| LIMIT 4 +; + +c:long |b:date +0 |1952-01-01T00:00:00.000Z +0 |1953-01-01T00:00:00.000Z +0 |null +1 |1965-01-01T00:00:00.000Z +; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8f8d95a33429c..8da6863465d39 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -404,6 +404,11 @@ public void testAggFilterOnBucketingOrAggFunctions() { query("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by bucket(salary, 10)"); // but fails if it's different + assertEquals( + "1:32: can only use grouping function [bucket(a, 3)] part of the BY clause", + error("row a = 1 | stats sum(a) where bucket(a, 3) > -1 by bucket(a,2)") + ); + assertEquals( "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause", error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by emp_no") From 35d6af6973cb4a6d11e5e6b3f7a29cb7fae147ff Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 22 Nov 2024 00:29:17 +1100 Subject: [PATCH 135/386] Mute 
org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT testSeqNoCASLinearizability #117249 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fa467896a7b34..6bdae21f1fa24 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -247,6 +247,9 @@ tests: - class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT method: testEnterpriseDownloaderTask issue: https://github.com/elastic/elasticsearch/issues/115163 +- class: org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT + method: testSeqNoCASLinearizability + issue: https://github.com/elastic/elasticsearch/issues/117249 # Examples: # From b9bac368dc962d312615d9444e9840051b14fd16 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 22 Nov 2024 00:35:19 +1100 Subject: [PATCH 136/386] Mute org.elasticsearch.discovery.ClusterDisruptionIT testAckedIndexing #117024 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 6bdae21f1fa24..f8ab532dcaa94 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -250,6 +250,9 @@ tests: - class: org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT method: testSeqNoCASLinearizability issue: https://github.com/elastic/elasticsearch/issues/117249 +- class: org.elasticsearch.discovery.ClusterDisruptionIT + method: testAckedIndexing + issue: https://github.com/elastic/elasticsearch/issues/117024 # Examples: # From 7dc2cc6b7f68ca7bbdb138fc239dda8204df4556 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 21 Nov 2024 14:55:38 +0100 Subject: [PATCH 137/386] IndexNameExpressionResolver refactoring (#116085) * Refactor DateMathExpressionResolver. In this commit we reduce the scope of the DateMathExpressionResolver to only handle one expression at a time. This simplifies the code since it move the preprocessing from the date math calculation. Furthermore, we simplify the API, so it does not need a context. Finally, the reduced scope allowed us to reduce the test footprint. The tests are targeted only to the single expression date math resolution and any test with expression combinations will be moved to the IndexNameExpressionResolverTests. * Create SystemResourceAccess. In this class we collect all the related access checks to system indices. These checks are not straight forward and there are different rules that apply on different parts of the code. In this PR, we just collect them in one place to allow further analysis to determine if these differences are a feature or a bug. * Refactor WildcardExpressionResolver. In this PR we reduced the scope of the WildcardExpressionResolver to resolve one expression at a time. It also still supports the `*`. This allows us to reduce the scope of the test as well. Furthermore, we switched the usage of streams to more imperative code to reduce the object creation. * Refactor expression resolution to resources. In this PR we bring all the previous steps together. We change the expression resolution, instead of processing lists of expressions to completely resolve one expression to its resources before moving to the next. This intends to increase the maintainability of the code, because we can debug it easier and we reduce the code duplication when dealing with exclusions and other pre-processing tasks. 
* Fix format * Bug fix: do the empty check on wildcard expressions on each wildcard * Polishing * Optimise for no wildcards * Fix test name typo * Replace for-each loops with for-i loops --------- Co-authored-by: Elastic Machine Co-authored-by: James Baiera --- .../metadata/IndexNameExpressionResolver.java | 835 ++++++++++-------- .../DateMathExpressionResolverTests.java | 201 ++--- .../IndexNameExpressionResolverTests.java | 182 ++-- .../WildcardExpressionResolverTests.java | 306 ++----- .../core/ilm/GenerateSnapshotNameStep.java | 6 +- .../ilm/GenerateSnapshotNameStepTests.java | 10 +- 6 files changed, 677 insertions(+), 863 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index bf80c38d64a4e..279243eeff7cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -48,17 +48,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; +/** + * This class main focus is to resolve multi-syntax target expressions to resources or concrete indices. This resolution is influenced + * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: + * - Resolve expressions to concrete indices + * - Resolve expressions to data stream names + * - Resolve expressions to resources (meaning indices, data streams and aliases) + * Note: This class is performance sensitive, so we pay extra attention on the data structure usage and we avoid streams and iterators + * when possible in favor of the classic for-i loops. 
+ */ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); @@ -190,7 +197,7 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) @@ -220,7 +227,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); @@ -236,7 +243,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit ); } } - checkSystemIndexAccess(context, Set.of(ia.getWriteIndex())); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, ia.getWriteIndex()); return ia; } else { throw new IllegalArgumentException( @@ -245,30 +252,110 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { - if (context.getOptions().expandWildcardExpressions() == false) { + /** + * Resolve the expression to the set of indices, aliases, and, optionally, data streams that the expression matches. + * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the + * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. + */ + protected static Collection resolveExpressionsToResources(Context context, String... expressions) { + // If we do not expand wildcards, then empty or _all expression result in an empty list + boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + if (expandWildcards == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); - } else { - return ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ); } } else { if (expressions == null || expressions.length == 0 || expressions.length == 1 && (Metadata.ALL.equals(expressions[0]) || Regex.isMatchAllPattern(expressions[0]))) { return WildcardExpressionResolver.resolveAll(context); + } else if (isNoneExpression(expressions)) { + return List.of(); + } + } + + // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. + Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + ? new LinkedHashSet<>() + : new ArrayList<>(expressions.length); + boolean wildcardSeen = false; + for (int i = 0, n = expressions.length; i < n; i++) { + String originalExpression = expressions[i]; + + // Resolve exclusion, a `-` prefixed expression is an exclusion only if it succeeds a wildcard. + boolean isExclusion = wildcardSeen && originalExpression.startsWith("-"); + String baseExpression = isExclusion ? 
originalExpression.substring(1) : originalExpression; + + // Resolve date math + baseExpression = DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime); + + // Validate base expression + validateResourceExpression(context, baseExpression, expressions); + + // Check if it's wildcard + boolean isWildcard = expandWildcards && WildcardExpressionResolver.isWildcard(originalExpression); + wildcardSeen |= isWildcard; + + if (isWildcard) { + Set matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + + if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { + throw notFoundException(baseExpression); + } + + if (isExclusion) { + resources.removeAll(matchingResources); + } else { + resources.addAll(matchingResources); + } } else { - return WildcardExpressionResolver.resolve( - context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ) - ); + if (isExclusion) { + resources.remove(baseExpression); + } else if (ensureAliasOrIndexExists(context, baseExpression)) { + resources.add(baseExpression); + } + } + } + return resources; + } + + /** + * Validates the requested expression by performing the following checks: + * - Ensure it's not empty + * - Ensure it doesn't start with `_` + * - Ensure it's not a remote expression unless the allow unavailable targets is enabled. + */ + private static void validateResourceExpression(Context context, String current, String[] expressions) { + if (Strings.isEmpty(current)) { + throw notFoundException(current); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. + if (current.charAt(0) == '_') { + throw new InvalidIndexNameException(current, "must not start with '_'."); + } + ensureRemoteExpressionRequireIgnoreUnavailable(context.getOptions(), current, expressions); + } + + /** + * Throws an exception if the expression is a remote expression and we do not allow unavailable targets + */ + private static void ensureRemoteExpressionRequireIgnoreUnavailable(IndicesOptions options, String current, String[] expressions) { + if (options.ignoreUnavailable()) { + return; + } + if (RemoteClusterAware.isRemoteIndexName(current)) { + List crossClusterIndices = new ArrayList<>(); + for (int i = 0; i < expressions.length; i++) { + if (RemoteClusterAware.isRemoteIndexName(expressions[i])) { + crossClusterIndices.add(expressions[i]); + } } + throw new IllegalArgumentException( + "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices + ); } } @@ -341,7 +428,7 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); @@ -395,7 +482,9 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { && context.getOptions().includeFailureIndices()) { // Collect the data streams involved Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); aliasDataStreams.add(indicesLookup.get(index.getName()).getParentDataStream()); } for (DataStream dataStream : aliasDataStreams) { @@ -416,13 +505,16 @@ Index[] concreteIndices(Context context, String... indexExpressions) { if (context.getOptions().allowNoIndices() == false && concreteIndicesResult.isEmpty()) { throw notFoundException(indexExpressions); } - checkSystemIndexAccess(context, concreteIndicesResult); - return concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + Index[] resultArray = concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, resultArray); + return resultArray; } private static void resolveIndicesForDataStream(Context context, DataStream dataStream, Set concreteIndicesResult) { if (shouldIncludeRegularIndices(context.getOptions())) { - for (Index index : dataStream.getIndices()) { + List indices = dataStream.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -431,7 +523,9 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions())) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + List failureIndices = dataStream.getFailureIndices().getIndices(); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -482,64 +576,6 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract return indexAbstraction.getIndices().size() > 1; } - private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == Predicates.always()) { - return; - } - doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); - } - - private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { - final Metadata metadata = context.getState().metadata(); - final List resolvedSystemIndices = new ArrayList<>(); - final List resolvedNetNewSystemIndices = new ArrayList<>(); - final Set resolvedSystemDataStreams = new HashSet<>(); - final SortedMap indicesLookup = metadata.getIndicesLookup(); - boolean matchedIndex = false; - for (Index concreteIndex : concreteIndices) { - IndexMetadata idxMetadata = metadata.index(concreteIndex); - String name = concreteIndex.getName(); - if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { - matchedIndex = true; - IndexAbstraction indexAbstraction = indicesLookup.get(name); - if (indexAbstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(name)) { - 
resolvedNetNewSystemIndices.add(name); - } else { - resolvedSystemIndices.add(name); - } - } - } - if (matchedIndex) { - handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); - } - } - - private void handleMatchedSystemIndices( - List resolvedSystemIndices, - Set resolvedSystemDataStreams, - List resolvedNetNewSystemIndices - ) { - if (resolvedSystemIndices.isEmpty() == false) { - Collections.sort(resolvedSystemIndices); - deprecationLogger.warn( - DeprecationCategory.API, - "open_system_index_access", - "this request accesses system indices: {}, but in a future major version, direct access to system " - + "indices will be prevented by default", - resolvedSystemIndices - ); - } - if (resolvedSystemDataStreams.isEmpty() == false) { - throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); - } - if (resolvedNetNewSystemIndices.isEmpty() == false) { - throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); - } - } - private static IndexNotFoundException notFoundException(String... indexExpressions) { final IndexNotFoundException infe; if (indexExpressions == null @@ -568,16 +604,16 @@ private static IndexNotFoundException notFoundException(String... indexExpressio } private static boolean shouldTrackConcreteIndex(Context context, Index index) { - if (context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY - && context.netNewSystemIndexPredicate.test(index.getName())) { + if (SystemResourceAccess.isNetNewInBackwardCompatibleMode(context, index)) { // Exclude this one as it's a net-new system index, and we explicitly don't want those. return false; } + IndicesOptions options = context.getOptions(); if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) { DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { if (parentDataStream.isFailureStoreIndex(index.getName())) { - if (context.options.ignoreUnavailable()) { + if (options.ignoreUnavailable()) { return false; } else { throw new FailureIndexNotSupportedException(index); @@ -587,7 +623,6 @@ private static boolean shouldTrackConcreteIndex(Context context, Index index) { } final IndexMetadata imd = context.state.metadata().index(index); if (imd.getState() == IndexMetadata.State.CLOSE) { - IndicesOptions options = context.options; if (options.forbidClosedIndices() && options.ignoreUnavailable() == false) { throw new IndexClosedException(index); } else { @@ -721,21 +756,6 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) return state.metadata().hasIndexAbstraction(resolvedAliasOrIndex); } - /** - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression) { - return DateMathExpressionResolver.resolveExpression(dateExpression); - } - - /** - * @param time instant to consider when parsing the expression - * @return If the specified string is data math expression then this method returns the resolved expression. 
- */ - public static String resolveDateMathExpression(String dateExpression, long time) { - return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); - } - /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ @@ -765,7 +785,8 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); + // unmodifiable without creating a new collection as it might contain many items + Collection resolved = resolveExpressionsToResources(context, expressions); if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items return Collections.unmodifiableSet((Set) resolved); @@ -779,7 +800,7 @@ public Set resolveExpressions( * given index. *

     * Only aliases with filters are returned. If the indices list contains a non-filtering reference to
     * the index itself - null is returned. Returns {@code null} if no filtering is required.
-     * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}.
+     * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}.
      */
     public String[] filteringAliases(ClusterState state, String index, Set<String> resolvedExpressions) {
         return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions);
     }
@@ -799,7 +820,8 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) {
      * Iterates through the list of indices and selects the effective list of required aliases for the given index.
      * Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to
      * the index itself - null is returned. Returns {@code null} if no filtering is required.
-     * NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}.
+     *

    NOTE: the provided expressions must have been resolved already via + * {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] indexAliases( ClusterState state, @@ -878,7 +900,8 @@ public String[] indexAliases( .toArray(AliasMetadata[]::new); } List aliases = null; - for (AliasMetadata aliasMetadata : aliasCandidates) { + for (int i = 0; i < aliasCandidates.length; i++) { + AliasMetadata aliasMetadata = aliasCandidates[i]; if (requiredAlias.test(aliasMetadata)) { // If required - add it to the list of aliases if (aliases == null) { @@ -914,7 +937,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); // TODO: it appears that this can never be true? if (isAllIndices(resolvedExpressions)) { @@ -932,7 +955,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab for (String expression : resolvedExpressions) { IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { - for (Index index : indexAbstraction.getIndices()) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); String concreteIndex = index.getName(); if (norouting.contains(concreteIndex) == false) { AliasMetadata aliasMetadata = state.metadata().index(concreteIndex).getAliases().get(indexAbstraction.getName()); @@ -961,7 +985,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab continue; } if (dataStream.getIndices() != null) { - for (Index index : dataStream.getIndices()) { + for (int i = 0, n = dataStream.getIndices().size(); i < n; i++) { + Index index = dataStream.getIndices().get(i); String concreteIndex = index.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1006,8 +1031,8 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m Set r = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); Map> routings = new HashMap<>(); String[] concreteIndices = metadata.getConcreteAllIndices(); - for (String index : concreteIndices) { - routings.put(index, r); + for (int i = 0; i < concreteIndices.length; i++) { + routings.put(concreteIndices[i], r); } return routings; } @@ -1036,6 +1061,16 @@ static boolean isExplicitAllPattern(Collection aliasesOrIndices) { return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next()); } + /** + * Identifies if this expression list is *,-* which effectively means a request that requests no indices. + */ + static boolean isNoneExpression(String[] expressions) { + return expressions.length == 2 && "*".equals(expressions[0]) && "-*".equals(expressions[1]); + } + + /** + * @return the system access level that will be applied in this resolution. See {@link SystemIndexAccessLevel} for details. 
+ */ public SystemIndexAccessLevel getSystemIndexAccessLevel() { final SystemIndexAccessLevel accessLevel = SystemIndices.getSystemIndexAccessLevel(threadContext); assert accessLevel != SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY @@ -1043,6 +1078,14 @@ public SystemIndexAccessLevel getSystemIndexAccessLevel() { return accessLevel; } + /** + * Determines the right predicate based on the {@link IndexNameExpressionResolver#getSystemIndexAccessLevel()}. Specifically: + * - NONE implies no access to net-new system indices and data streams + * - BACKWARDS_COMPATIBLE_ONLY allows access also to net-new system resources + * - ALL allows access to everything + * - otherwise we fall back to {@link SystemIndices#getProductSystemIndexNamePredicate(ThreadContext)} + * @return the predicate that defines the access to system indices. + */ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; @@ -1067,6 +1110,43 @@ public Predicate getNetNewSystemIndexPredicate() { return systemIndices::isNetNewSystemIndex; } + /** + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * exception. + */ + @Nullable + private static boolean ensureAliasOrIndexExists(Context context, String name) { + boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); + IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); + if (indexAbstraction == null) { + if (ignoreUnavailable) { + return false; + } else { + throw notFoundException(name); + } + } + // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) + if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { + if (ignoreUnavailable) { + return false; + } else { + throw aliasesNotSupportedException(name); + } + } + if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { + if (ignoreUnavailable) { + return false; + } else { + IndexNotFoundException infe = notFoundException(name); + // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. + infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); + throw infe; + } + } + return true; + } + public static class Context { private final ClusterState state; @@ -1242,7 +1322,7 @@ public Predicate getSystemIndexAccessPredicate() { } /** - * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases + * Resolves name expressions with wildcards into the corresponding concrete indices/aliases/data streams */ static final class WildcardExpressionResolver { @@ -1251,8 +1331,8 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. - * Depending on the context, returns the names of the datastreams themselves or their backing indices. + * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. + * Depending on the context, returns the names of the data streams themselves or their backing indices. 
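     * As an editorial illustration only (not part of this change; {@code context} is assumed to be an already-built
     * {@code Context} for the current cluster state):
     * <pre>{@code
     * Collection<String> names = WildcardExpressionResolver.resolveAll(context);
     * // hidden resources appear only when the context expands hidden wildcards, and system
     * // resources only where the context's system-index access predicate permits them
     * }</pre>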
*/ public static Collection resolveAll(Context context) { List concreteIndices = resolveEmptyOrTrivialWildcard(context); @@ -1261,16 +1341,17 @@ public static Collection resolveAll(Context context) { return concreteIndices; } - Stream ias = context.getState() + Set resolved = new HashSet<>(concreteIndices.size()); + context.getState() .metadata() .getIndicesLookup() .values() .stream() .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) - .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())) + .forEach(ia -> resolved.addAll(expandToOpenClosed(context, ia))); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1283,73 +1364,6 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } - /** - * Returns all the existing resource (index, alias and datastream) names that the {@param expressions} list resolves to. - * The passed-in {@param expressions} can contain wildcards and exclusions, as well as plain resource names. - *
    - * The return is a {@code Collection} (usually a {@code Set} but can also be a {@code List}, for performance reasons) of plain
    - * resource names only. All the returned resources are "accessible", in the given context, i.e. the resources exist
    - * and are not an alias or a datastream if the context does not permit it.
    - * Wildcard expressions, depending on the context:
    - *   1. might throw an exception if they don't resolve to anything
    - *   2. might not resolve to hidden or system resources (but plain names can refer to hidden or system resources)
    - *   3. might resolve to aliases and datastreams, and it could be (depending on the context) that their backing indices are what's
    - * ultimately returned, instead of the alias or datastream name
    - */ - public static Collection resolve(Context context, List expressions) { - // fast exit if there are no wildcards to evaluate - if (context.getOptions().expandWildcardExpressions() == false) { - return expressions; - } - int firstWildcardIndex = 0; - for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { - String expression = expressions.get(firstWildcardIndex); - if (isWildcard(expression)) { - break; - } - } - if (firstWildcardIndex == expressions.size()) { - return expressions; - } - Set result = new HashSet<>(); - for (int i = 0; i < firstWildcardIndex; i++) { - result.add(expressions.get(i)); - } - AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); - for (int i = firstWildcardIndex; i < expressions.size(); i++) { - String expression = expressions.get(i); - boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; - if (i == firstWildcardIndex || isWildcard(expression)) { - Stream matchingResources = matchResourcesToWildcard( - context, - isExclusion ? expression.substring(1) : expression - ); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - if (emptyWildcardExpansion != null) { - emptyWildcardExpansion.set(true); - matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); - } - if (isExclusion) { - matchingOpenClosedNames.forEach(result::remove); - } else { - matchingOpenClosedNames.forEach(result::add); - } - if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { - throw notFoundException(expression); - } - } else { - if (isExclusion) { - result.remove(expression.substring(1)); - } else { - result.add(expression); - } - } - } - return result; - } - private static IndexMetadata.State excludeState(IndicesOptions options) { final IndexMetadata.State excludeState; if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { @@ -1366,55 +1380,82 @@ private static IndexMetadata.State excludeState(IndicesOptions options) { } /** - * Given a single wildcard {@param expression}, return the {@code Stream} that contains all the resources (i.e. indices, aliases, - * and datastreams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's + * Given a single wildcard {@param expression}, return a {@code Set} that contains all the resources (i.e. indices, aliases, + * and data streams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's * name matches the {@param expression} wildcard). * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions - * on whether to consider alias, datastream, system, and hidden resources. - * It does NOT consider the open or closed status of index resources. + * on whether to consider alias, data stream, system, and hidden resources. 
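     * A hypothetical sketch of the expected behaviour (the names and {@code context} are invented for illustration):
     * with indices {@code logs-1} and {@code logs-2} plus an alias {@code logs-alias} over them in the lookup,
     * <pre>{@code
     * Set<String> matches = matchWildcardToResources(context, "logs-*");
     * // expected to contain logs-1 and logs-2; the alias contributes its own name or its
     * // backing indices, depending on whether the context preserves aliases
     * }</pre>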
*/ - private static Stream matchResourcesToWildcard(Context context, String wildcardExpression) { + static Set matchWildcardToResources(Context context, String wildcardExpression) { assert isWildcard(wildcardExpression); final SortedMap indicesLookup = context.getState().getMetadata().getIndicesLookup(); - Stream matchesStream; + Set matchedResources = new HashSet<>(); + // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" if (Regex.isSuffixMatchPattern(wildcardExpression)) { - // this is an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" - matchesStream = filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values().stream(); - } else { - matchesStream = indicesLookup.values().stream(); - if (Regex.isMatchAllPattern(wildcardExpression) == false) { - matchesStream = matchesStream.filter( - indexAbstraction -> Regex.simpleMatch(wildcardExpression, indexAbstraction.getName()) - ); + for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); + } + return matchedResources; + } + // In case of match all it fetches all index abstractions + if (Regex.isMatchAllPattern(wildcardExpression)) { + for (IndexAbstraction ia : indicesLookup.values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); } + return matchedResources; } - if (context.getOptions().ignoreAliases()) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.getType() != Type.ALIAS); + for (IndexAbstraction indexAbstraction : indicesLookup.values()) { + if (Regex.simpleMatch(wildcardExpression, indexAbstraction.getName())) { + maybeAddToResult(context, wildcardExpression, indexAbstraction, matchedResources); + } } - if (context.includeDataStreams() == false) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isDataStreamRelated() == false); + return matchedResources; + } + + private static void maybeAddToResult( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction, + Set matchedResources + ) { + if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) { + matchedResources.addAll(expandToOpenClosed(context, indexAbstraction)); } - // historic, i.e. not net-new, system indices are included irrespective of the system access predicate - // the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || (indexAbstraction.getType() != Type.DATA_STREAM - && indexAbstraction.getParentDataStream() == null - && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false) - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); + } + + /** + * Checks if this index abstraction should be included because it matched the wildcard expression. 
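     * For example (an editorial sketch, not part of this change; {@code hiddenLogs} stands for a hypothetical hidden,
     * non-system index abstraction named {@code .hidden-logs}, and no other exclusion applies):
     * <pre>{@code
     * // with expandWildcardsHidden=false:
     * shouldExpandToIndexAbstraction(context, ".hid*", hiddenLogs); // true: dot-prefixed wildcards match dot-prefixed hidden names
     * shouldExpandToIndexAbstraction(context, "*", hiddenLogs);     // false: a bare wildcard skips hidden resources
     * }</pre>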
+ * @param context the options of this request that influence the decision if this index abstraction should be included in the result + * @param wildcardExpression the wildcard expression that matched this index abstraction + * @param indexAbstraction the index abstraction in question + * @return true, if the index abstraction should be included in the result + */ + private static boolean shouldExpandToIndexAbstraction( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction + ) { + if (context.getOptions().ignoreAliases() && indexAbstraction.getType() == Type.ALIAS) { + return false; + } + if (context.includeDataStreams() == false && indexAbstraction.isDataStreamRelated()) { + return false; + } + + if (indexAbstraction.isSystem() + && SystemResourceAccess.shouldExpandToSystemIndexAbstraction(context, indexAbstraction) == false) { + return false; + } + if (context.getOptions().expandWildcardsHidden() == false) { - if (wildcardExpression.startsWith(".")) { - // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also - // starts with "." - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isHidden() == false || indexAbstraction.getName().startsWith(".") - ); - } else { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isHidden() == false); + // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also + // starts with "." + if (indexAbstraction.isHidden() + && (wildcardExpression.startsWith(".") && indexAbstraction.getName().startsWith(".")) == false) { + return false; } } - return matchesStream; + return true; } private static Map filterIndicesLookupForSuffixWildcard( @@ -1430,35 +1471,39 @@ private static Map filterIndicesLookupForSuffixWildcar } /** - * Return the {@code Stream} of open and/or closed index names for the given {@param resources}. + * Return the {@code Set} of open and/or closed index names for the given {@param resources}. * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. 
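     * A minimal sketch of the intent (editorial illustration; {@code aliasAbstraction} is a hypothetical alias over one
     * open and one closed index, and the context does not preserve alias names):
     * <pre>{@code
     * Set<String> names = expandToOpenClosed(context, aliasAbstraction);
     * // with expandWildcardsClosed=false the closed index is filtered out, leaving only the open index's name
     * }</pre>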
*/ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Set expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); - return resources.flatMap(indexAbstraction -> { - if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); - } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); - } else { - Stream indicesStateStream = Stream.of(); - if (shouldIncludeRegularIndices(context.getOptions())) { - indicesStateStream = indexAbstraction.getIndices().stream().map(context.state.metadata()::index); - } - if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { - DataStream dataStream = (DataStream) indexAbstraction; - indicesStateStream = Stream.concat( - indicesStateStream, - dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) - ); + Set resources = new HashSet<>(); + if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { + resources.add(indexAbstraction.getName()); + } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { + resources.add(indexAbstraction.getName()); + } else { + if (shouldIncludeRegularIndices(context.getOptions())) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - if (excludeState != null) { - indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); + } + if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { + DataStream dataStream = (DataStream) indexAbstraction; + for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { + Index index = dataStream.getFailureIndices().getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } - }); + } + return resources; } private static List resolveEmptyOrTrivialWildcard(Context context) { @@ -1471,26 +1516,26 @@ private static List resolveEmptyOrTrivialWildcard(Context context) { } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { - return Arrays.stream(allIndices).filter(name -> { - if (name.startsWith(".")) { - IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); - assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; - if (abstraction.isSystem()) { - if (context.netNewSystemIndexPredicate.test(name)) { - if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { - return false; - } else { - return context.systemIndexAccessPredicate.test(name); - } - } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { - return context.systemIndexAccessPredicate.test(name); - } - } else { - return true; - } + List filteredIndices = new 
ArrayList<>(allIndices.length); + for (int i = 0; i < allIndices.length; i++) { + if (shouldIncludeIndexAbstraction(context, allIndices[i])) { + filteredIndices.add(allIndices[i]); } + } + return filteredIndices; + } + + private static boolean shouldIncludeIndexAbstraction(Context context, String name) { + if (name.startsWith(".") == false) { return true; - }).toList(); + } + + IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); + assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; + if (abstraction.isSystem() == false) { + return true; + } + return SystemResourceAccess.isSystemIndexAbstractionAccessible(context, abstraction); } private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions options, Metadata metadata) { @@ -1513,8 +1558,39 @@ private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions return Strings.EMPTY_ARRAY; } } + + static boolean isWildcard(String expression) { + return Regex.isSimpleMatchPattern(expression); + } + + static boolean hasWildcards(String[] expressions) { + for (int i = 0; i < expressions.length; i++) { + if (isWildcard(expressions[i])) { + return true; + } + } + return false; + } + } + + /** + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression) { + return DateMathExpressionResolver.resolveExpression(dateExpression); + } + + /** + * @param time instant to consider when parsing the expression + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression, long time) { + return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); } + /** + * Resolves a date math expression based on the requested time. + */ public static final class DateMathExpressionResolver { private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); @@ -1530,35 +1606,18 @@ private DateMathExpressionResolver() { } /** - * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying. - * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects. + * Resolves a date math expression using the current time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. */ - public static List resolve(Context context, List expressions) { - boolean wildcardSeen = false; - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - String[] result = null; - for (int i = 0, n = expressions.size(); i < n; i++) { - String expression = expressions.get(i); - // accepts date-math exclusions that are of the form "-<...{}>",f i.e. the "-" is outside the "<>" date-math template - boolean isExclusion = wildcardSeen && expression.startsWith("-"); - wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression)); - String toResolve = isExclusion ? expression.substring(1) : expression; - String resolved = resolveExpression(toResolve, context::getStartTime); - if (toResolve != resolved) { - if (result == null) { - result = expressions.toArray(Strings.EMPTY_ARRAY); - } - result[i] = isExclusion ? "-" + resolved : resolved; - } - } - return result == null ? 
expressions : Arrays.asList(result); - } - - static String resolveExpression(String expression) { + public static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(String expression, LongSupplier getTime) { + /** + * Resolves a date math expression using the provided time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. + */ + public static String resolveExpression(String expression, LongSupplier getTime) { if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) { return expression; } @@ -1707,135 +1766,133 @@ private static String doResolveExpression(String expression, LongSupplier getTim } } - public static final class ExplicitResourceNameFilter { + /** + * In this class we collect the system access relevant code. The helper methods provide the following functionalities: + * - determining the access to a system index abstraction + * - verifying the access to system abstractions and adding the necessary warnings + * - determining the access to a system index based on its name + * WARNING: we have observed differences in how the access is determined. For now this behaviour is documented and preserved. + */ + public static final class SystemResourceAccess { - private ExplicitResourceNameFilter() { + private SystemResourceAccess() { // Utility class } /** - * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. - * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. + * Checks if this system index abstraction should be included when resolving via {@link + * IndexNameExpressionResolver.WildcardExpressionResolver#resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context, String[])}. + * NOTE: it behaves differently than {@link SystemResourceAccess#shouldExpandToSystemIndexAbstraction(Context, IndexAbstraction)} + * because in the case that the access level is BACKWARDS_COMPATIBLE_ONLY it does not include the net-new indices, this is + * questionable. */ - public static List filterUnavailable(Context context, List expressions) { - ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - boolean wildcardSeen = false; - List result = null; - for (int i = 0; i < expressions.size(); i++) { - String expression = expressions.get(i); - if (Strings.isEmpty(expression)) { - throw notFoundException(expression); - } - // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API - // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, - // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown - // if the expression can't be found. 
- if (expression.charAt(0) == '_') { - throw new InvalidIndexNameException(expression, "must not start with '_'."); - } - final boolean isWildcard = expandWildcards && isWildcard(expression); - if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) { - if (result != null) { - result.add(expression); - } + public static boolean isSystemIndexAbstractionAccessible(Context context, IndexAbstraction abstraction) { + assert abstraction.isSystem() : "We should only check this for system resources"; + if (context.netNewSystemIndexPredicate.test(abstraction.getName())) { + if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { + return false; } else { - if (result == null) { - result = new ArrayList<>(expressions.size() - 1); - result.addAll(expressions.subList(0, i)); - } + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - wildcardSeen |= isWildcard; + } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - return result == null ? expressions : result; + return true; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of - * exception. + * Historic, i.e. not net-new, system indices are included irrespective of the system access predicate + * the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature. + * A historic system resource, can only be an index since system data streams were added later. */ - @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { - boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); - IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); - if (indexAbstraction == null) { - if (ignoreUnavailable) { - return false; - } else { - throw notFoundException(name); - } - } - // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) - if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { - if (ignoreUnavailable) { - return false; - } else { - throw aliasesNotSupportedException(name); - } - } - if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { - if (ignoreUnavailable) { - return false; - } else { - IndexNotFoundException infe = notFoundException(name); - // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. 
- infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); - throw infe; - } - } - return true; + private static boolean shouldExpandToSystemIndexAbstraction(Context context, IndexAbstraction indexAbstraction) { + assert indexAbstraction.isSystem() : "We should only check this for system resources"; + boolean isHistoric = indexAbstraction.getType() != Type.DATA_STREAM + && indexAbstraction.getParentDataStream() == null + && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false; + return isHistoric || context.systemIndexAccessPredicate.test(indexAbstraction.getName()); } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { - if (options.ignoreUnavailable()) { + /** + * Checks if any system indices that should not have been accessible according to the + * {@link Context#getSystemIndexAccessPredicate()} are accessed, and it performs the following actions: + * - if there are historic (aka not net-new) system indices, then it adds a deprecation warning + * - if it contains net-new system indices or system data streams, it throws an exception. + */ + private static void checkSystemIndexAccess(Context context, ThreadContext threadContext, Index... concreteIndices) { + final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); + if (systemIndexAccessPredicate == Predicates.always()) { return; } - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); - } - } + doCheckSystemIndexAccess(context, systemIndexAccessPredicate, threadContext, concreteIndices); } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { - List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - crossClusterIndices.add(index); + private static void doCheckSystemIndexAccess( + Context context, + Predicate systemIndexAccessPredicate, + ThreadContext threadContext, + Index... concreteIndices + ) { + final Metadata metadata = context.getState().metadata(); + final List resolvedSystemIndices = new ArrayList<>(); + final List resolvedNetNewSystemIndices = new ArrayList<>(); + final Set resolvedSystemDataStreams = new HashSet<>(); + final SortedMap indicesLookup = metadata.getIndicesLookup(); + boolean matchedIndex = false; + for (int i = 0; i < concreteIndices.length; i++) { + Index concreteIndex = concreteIndices[i]; + IndexMetadata idxMetadata = metadata.index(concreteIndex); + String name = concreteIndex.getName(); + if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { + matchedIndex = true; + IndexAbstraction indexAbstraction = indicesLookup.get(name); + if (indexAbstraction.getParentDataStream() != null) { + resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); + } else if (context.netNewSystemIndexPredicate.test(name)) { + resolvedNetNewSystemIndices.add(name); + } else { + resolvedSystemIndices.add(name); + } } } - throw new IllegalArgumentException( - "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices - ); - } - } - - /** - * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState} - * since it uses only the start time to resolve expressions. 
- */ - public static final class ResolverContext extends Context { - public ResolverContext() { - this(System.currentTimeMillis()); - } - - public ResolverContext(long startTime) { - super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never()); + if (matchedIndex) { + handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices, threadContext); + } } - @Override - public ClusterState getState() { - throw new UnsupportedOperationException("should never be called"); + private static void handleMatchedSystemIndices( + List resolvedSystemIndices, + Set resolvedSystemDataStreams, + List resolvedNetNewSystemIndices, + ThreadContext threadContext + ) { + if (resolvedSystemIndices.isEmpty() == false) { + Collections.sort(resolvedSystemIndices); + deprecationLogger.warn( + DeprecationCategory.API, + "open_system_index_access", + "this request accesses system indices: {}, but in a future major version, direct access to system " + + "indices will be prevented by default", + resolvedSystemIndices + ); + } + if (resolvedSystemDataStreams.isEmpty() == false) { + throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); + } + if (resolvedNetNewSystemIndices.isEmpty() == false) { + throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); + } } - @Override - public IndicesOptions getOptions() { - throw new UnsupportedOperationException("should never be called"); + /** + * Used in {@link IndexNameExpressionResolver#shouldTrackConcreteIndex(Context, Index)} to exclude net-new indices + * when we are in backwards compatible only access level. + * This also feels questionable as well. + */ + private static boolean isNetNewInBackwardCompatibleMode(Context context, Index index) { + return context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY + && context.netNewSystemIndexPredicate.test(index.getName()); } } - private static boolean isWildcard(String expression) { - return Regex.isSimpleMatchPattern(expression); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..57c360dc6a92a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -10,163 +10,90 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; import java.util.Locale; +import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; 
import static org.hamcrest.Matchers.equalTo; public class DateMathExpressionResolverTests extends ESTestCase { - private final Context context = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictExpand(), - SystemIndexAccessLevel.NONE - ); + private final long now = randomMillisUpToYear9999(); + private final LongSupplier getTime = () -> now; - private static ZonedDateTime dateFromMillis(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); - } + public void testNoDateMathExpression() { + String expression = randomAlphaOfLength(10); + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); - private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { - DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); - return dateFormatter.format(zonedDateTime); + expression = "*"; + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); } - public void testNormal() throws Exception { - int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); - for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); - } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(indexExpressions.size())); - for (int i = 0; i < indexExpressions.size(); i++) { - assertThat(result.get(i), equalTo(indexExpressions.get(i))); - } - } + public void testExpression() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testExpression() throws Exception { - List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + result = DateMathExpressionResolver.resolveExpression("<.watch_history-{now}>", getTime); + assertThat(result, equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = Arrays.asList( - "<-before-inner-{now}>", - "-", - "", - "<-after-inner-{now}>", - "-" - ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-after-outer-" + formatDate("uuuu.MM.dd", 
dateFromMillis(context.getStartTime()))) - ) - ); - Context noWildcardExpandContext = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictSingleIndexNoExpandForbidClosed(), - SystemIndexAccessLevel.NONE - ); - result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-"), - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-") - ) - ); - } + String result = DateMathExpressionResolver.resolveExpression("<-before-inner-{now}>", getTime); + assertThat(result, equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "*")); + + result = DateMathExpressionResolver.resolveExpression("<-after-inner-{now}>", getTime); + assertThat(result, equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); - assertThat(result.size(), equalTo(0)); } - public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + public void testExpression_Static() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-test>", getTime); + assertThat(result, equalTo(".marvel-test")); } - public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); - assertThat(result.size(), equalTo(1)); + public void testExpression_MultiParts() { + String result = DateMathExpressionResolver.resolveExpression("<.text1-{now/d}-text2-{now/M}>", getTime); assertThat( - result.get(0), + result, equalTo( ".text1-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "-text2-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1)) + + formatDate("uuuu.MM.dd", dateFromMillis(now).withDayOfMonth(1)) ) ); } - public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); - assertThat(results.size(), equalTo(1)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - } - - public void testExpression_EscapeStatic() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + public void testExpression_CustomFormat() { + String result = 
DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{yyyy.MM.dd}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + public void testExpression_EscapeStatic() { + String result = DateMathExpressionResolver.resolveExpression("<.mar\\{v\\}el-{now/d}>", getTime); + assertThat(result, equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") - ); - assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + public void testExpression_EscapeDateFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{'\\{year\\}'yyyy}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(now)))); } - public void testExpression_CustomTimeZoneInIndexName() throws Exception { + public void testExpression_CustomTimeZoneInIndexName() { ZoneId timeZone; int hoursOffset; int minutesOffset = 0; @@ -194,57 +121,57 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { // rounding to today 00:00 now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0); } - Context context = new Context( - this.context.getState(), - this.context.getOptions(), - now.toInstant().toEpochMilli(), - SystemIndexAccessLevel.NONE, - name -> false, - name -> false - ); - List results = DateMathExpressionResolver.resolve( - context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + + String result = DateMathExpressionResolver.resolveExpression( + "<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>", + () -> now.toInstant().toEpochMilli() ); - assertThat(results.size(), equalTo(1)); - logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, result); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } - public void testExpressionInvalidUnescaped() throws Exception { + public void testExpressionInvalidUnescaped() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolveExpression("<.mar}vel-{now/d}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); } - public void testExpressionInvalidDateMathFormat() throws Exception { + public void 
testExpressionInvalidDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - public void testExpressionInvalidEmptyDateMathFormat() throws Exception { + public void testExpressionInvalidEmptyDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); } - public void testExpressionInvalidOpenEnded() throws Exception { + public void testExpressionInvalidOpenEnded() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + static String formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 99470918ce063..30895767c33c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -47,6 +48,7 @@ import java.time.LocalDate; import java.time.ZoneOffset; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,6 +60,8 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFailureStore; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.dateFromMillis; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.formatDate; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static 
org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; @@ -885,10 +889,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[] {})), - equalTo(newHashSet("kuku", "testXXX")) - ); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context)), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesNoIndicesErrorMessage() { @@ -1408,52 +1409,56 @@ public void testConcreteIndicesWildcardNoMatch() { } } - public void testIsAllIndicesNull() throws Exception { + public void testIsAllIndicesNull() { assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true)); } - public void testIsAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Collections.emptyList()), equalTo(true)); + public void testIsAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true)); + } + + public void testIsAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true)); } - public void testIsAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all")), equalTo(true)); + public void testIsAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false)); } - public void testIsAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsNoneIndices() { + assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true)); } - public void testIsAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("*")), equalTo(false)); + public void testIsAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false)); } - public void testIsExplicitAllIndicesNull() throws Exception { + public void testIsExplicitAllIndicesNull() { assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false)); } - public void testIsExplicitAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Collections.emptyList()), equalTo(false)); + public void testIsExplicitAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false)); } - public void testIsExplicitAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all")), equalTo(true)); + public void testIsExplicitAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true)); } - public void testIsExplicitAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all", "other")), equalTo(false)); + public void 
testIsExplicitAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false)); } - public void testIsExplicitAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsExplicitAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsExplicitAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("*")), equalTo(false)); + public void testIsExplicitAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false)); } public void testIndexOptionsFailClosedIndicesAndAliases() { @@ -1580,16 +1585,13 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("test-0", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") - ); - assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of("test-0", "test-1", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals(Set.of("test-1", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1598,16 +1600,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set resolvedExpressions = Set.of("alias-0", "alias-1"); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "test-1", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1742,7 +1744,7 @@ public void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new 
HashSet<>(Arrays.asList("test-0", "test-alias")); + Set resolvedExpressions = Set.of("test-0", "test-alias"); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1771,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1853,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1891,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1927,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1968,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -2328,40 +2330,40 @@ public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); } public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); } public void testWildcardSystemIndexResolutionSingleMatchAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); } public void testSingleSystemIndexResolutionAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - 
List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta")); } public void testFullWildcardSystemIndicesAreHidden() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(randomFrom("*", "_all")); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining("some-other-index")); } public void testFullWildcardSystemIndexResolutionDeprecated() { @@ -2370,8 +2372,8 @@ public void testFullWildcardSystemIndexResolutionDeprecated() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); assertWarnings( true, new DeprecationWarning( @@ -2388,8 +2390,8 @@ public void testSingleSystemIndexResolutionDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".ml-meta")); assertWarnings( true, new DeprecationWarning( @@ -2405,8 +2407,8 @@ public void testWildcardSystemIndexResolutionSingleMatchDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); assertWarnings( true, new DeprecationWarning( @@ -2423,8 +2425,8 @@ public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); assertWarnings( true, new DeprecationWarning( @@ -2479,8 +2481,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2496,8 +2498,8 
@@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2515,8 +2517,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2526,8 +2528,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2538,8 +2540,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2549,8 +2551,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -3073,7 +3075,6 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); - ; } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; @@ -3239,6 +3240,37 @@ public void testDataStreamsNames() { assertThat(names, empty()); } + public void testDateMathMixedArray() { + long now = System.currentTimeMillis(); + String dataMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)); + String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1)); + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + ClusterState.builder(new 
ClusterName("_name")) + .metadata( + Metadata.builder() + .put(indexBuilder("name1")) + .put(indexBuilder("name2")) + .put(indexBuilder(dataMathIndex1)) + .put(indexBuilder(dateMathIndex2)) + ) + .build(), + IndicesOptions.strictExpand(), + now, + SystemIndexAccessLevel.NONE, + Predicates.never(), + Predicates.never() + ); + Collection result = IndexNameExpressionResolver.resolveExpressionsToResources( + context, + "name1", + "<.marvel-{now/d}>", + "name2", + "<.logstash-{now/M{uuuu.MM}}>" + ); + assertThat(result.size(), equalTo(4)); + assertThat(result, contains("name1", dataMathIndex1, "name2", dateMathIndex2)); + } + public void testMathExpressionSupport() { Instant instant = LocalDate.of(2021, 01, 11).atStartOfDay().toInstant(ZoneOffset.UTC); String resolved = IndexNameExpressionResolver.resolveDateMathExpression("", instant.toEpochMilli()); @@ -3418,10 +3450,6 @@ private ClusterState systemIndexTestClusterState() { return ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); } - private List resolveConcreteIndexNameList(ClusterState state, SearchRequest request) { - return Arrays.stream(indexNameExpressionResolver.concreteIndices(state, request)).map(Index::getName).toList(); - } - private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) { return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..6a26e7948784c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,23 +13,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class WildcardExpressionResolverTests extends ESTestCase { @@ -50,107 +47,31 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*")), + equalTo(newHashSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) - ); - assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") - ) - ), - equalTo(newHashSet("testXYY")) - ); - if (indicesOptions == IndicesOptions.lenientExpandOpen()) { - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - equalTo(newHashSet("testXXX", "-testXXX")) - ); - } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(context, "testXXX", "-testXXX") - ); - assertEquals("-testXXX", infe.getIndex().getName()); - } - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) - ); - } - - public void testConvertWildcardsTests() { - Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").putAlias(AliasMetadata.builder("alias1")).putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testXYY").putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testYYY").putAlias(AliasMetadata.builder("alias3"))) - .put(indexBuilder("kuku")); - ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - - IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.lenientExpandOpen(), - SystemIndexAccessLevel.NONE - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", 
"-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); } public void testConvertWildcardsOpenClosedIndicesTests() { Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXXY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXYY").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("testYYY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testYYX").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("kuku").state(IndexMetadata.State.OPEN)); + .put(indexBuilder("testXXX").state(State.OPEN)) + .put(indexBuilder("testXXY").state(State.OPEN)) + .put(indexBuilder("testXYY").state(State.CLOSE)) + .put(indexBuilder("testYYY").state(State.OPEN)) + .put(indexBuilder("testYYX").state(State.CLOSE)) + .put(indexBuilder("kuku").state(State.OPEN)); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -159,7 +80,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -168,7 +89,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -177,26 +98,9 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY")) ); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(true, true, false, false), - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(context, "testX*").size(), equalTo(0)); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(false, true, false, false), - SystemIndexAccessLevel.NONE - ); - IndexNameExpressionResolver.Context finalContext = context; - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(finalContext, "testX*") - ); - assertThat(infe.getIndex().getName(), is("testX*")); } // issue #13334 @@ -217,28 +121,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); 
assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y")), equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*")), equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*")), equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X")).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X")).size(), equalTo(0) ); } @@ -259,26 +162,6 @@ public void testAll() { newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - IndicesOptions noExpandOptions = IndicesOptions.fromOptions( - randomBoolean(), - true, - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context( - state, - noExpandOptions, - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } public void testAllAliases() { @@ -506,112 +389,47 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertEquals(0, indices.size()); } { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesStrictContext, - Collections.singletonList("foo_a*") - ) + Set indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + skipAliasesStrictContext, + "foo_a*" ); - assertEquals("foo_a*", infe.getIndex().getName()); + assertThat(indices, empty()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesLenientContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(skipAliasesStrictContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } - IndicesOptions noExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions(true, false, false, false, true, false, true, false); - IndexNameExpressionResolver.Context noExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - noExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - noExpandNoAliasesContext, - List.of("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( - false, - true, - false, - false, - true, - false, - true, - false - ); - IndexNameExpressionResolver.Context strictNoExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - strictNoExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(strictNoExpandNoAliasesContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } } public void testResolveDataStreams() { @@ -654,17 +472,14 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data stream - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("bar_*") - ); + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "bar_*"); assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } @@ -691,9 +506,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -707,9 +522,9 @@ ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -748,9 +563,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -764,9 +579,9 @@ ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -808,24 +623,17 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection<String> matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + Collection<String> matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, "*" ); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*"); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + matches = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") - ); - assertThat( - iae.getMessage(), - containsString("The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead") - ); } private static IndexMetadata.Builder indexBuilder(String index, boolean hidden) { @@ -838,10 +646,6 @@ private static IndexMetadata.Builder indexBuilder(String index) { } private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) - ); - assertEquals(wildcardExpression, infe.getIndex().getName()); + assertThat(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, wildcardExpression), empty()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index cd44aaafbfae2..05eb7551330b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -130,11 +130,11 @@ public boolean equals(Object obj) { * still result in unique snapshot names. 
*/ public static String generateSnapshotName(String name) { - return generateSnapshotName(name, new IndexNameExpressionResolver.ResolverContext()); + return generateSnapshotName(name, System.currentTimeMillis()); } - public static String generateSnapshotName(String name, IndexNameExpressionResolver.Context context) { - String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, context.getStartTime()); + public static String generateSnapshotName(String name, long now) { + String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, now); // TODO: we are breaking the rules of UUIDs by lowercasing this here, find an alternative (snapshot names must be lowercase) return candidate + "-" + UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("<name-{now}>", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("<name-{now}>", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("<name-{now}>", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("<name-{now}>", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("<name-{now/M}>", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("<name-{now/M}>", time), startsWith("name-2019.03.01-")); - assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", resolverContext), startsWith("name-2019-03-15.21:09:00-")); + assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", time), startsWith("name-2019-03-15.21:09:00-")); } public void testNameValidation() { From 06840ba54dc741debd6f112bb417ff62b0136540 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Nov 2024 09:02:40 -0500 Subject: [PATCH 138/386] ESQL: Remove historical features (#116966) Our friends working on cluster features are looking to remove the infrastructure for historical features. In `main` all historical features are always enabled because historical features weren't allowed to support versions before 8.a_long_time_ago. All of ours are certainly enabled for versions `main` is wire compatible with.
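As a concrete illustration of what stays behind, here is a minimal sketch of the capability-enum pattern that replaces those historical features. This is illustrative only: the `capabilityName` and `all` helpers below are assumptions for the sketch, not the actual `EsqlCapabilities` API; the two constants are taken from the diff that follows.

    import java.util.Locale;
    import java.util.Set;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Sketch: each constant is a permanently-on capability, so there is no
    // per-version bookkeeping of the kind historical NodeFeatures required.
    enum Cap {
        CASE_MV,
        ENRICH_LOAD;

        // "ENRICH_LOAD" -> "enrich_load", the form a csv-spec test names in
        // a `required_capability:` line.
        String capabilityName() {
            return name().toLowerCase(Locale.ROOT);
        }

        // The set a node would advertise; tests skip when a required name is absent.
        static Set<String> all() {
            return Stream.of(values()).map(Cap::capabilityName).collect(Collectors.toSet());
        }
    }

Under that model a spec test declaring `required_capability: enrich_load` runs only where the advertised capability set contains that name, which gives roughly the gating the removed features provided without tying anything to versions. Hence the deletions below: `required_capability` lines for always-enabled behavior are dead weight.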
--- .../src/main/resources/boolean.csv-spec | 1 - .../src/main/resources/date.csv-spec | 1 - .../src/main/resources/enrich.csv-spec | 1 - .../src/main/resources/floats.csv-spec | 8 ---- .../src/main/resources/ints.csv-spec | 16 ------- .../src/main/resources/ip.csv-spec | 13 ------ .../src/main/resources/math.csv-spec | 20 --------- .../src/main/resources/spatial.csv-spec | 1 - .../src/main/resources/string.csv-spec | 14 ------ .../src/main/resources/unsigned_long.csv-spec | 8 ---- .../xpack/esql/action/EsqlCapabilities.java | 6 +++ .../xpack/esql/plugin/EsqlFeatures.java | 43 ------------------- .../esql/plugin/TransportEsqlStatsAction.java | 11 +---- .../elasticsearch/xpack/esql/CsvTests.java | 5 ++- 14 files changed, 11 insertions(+), 137 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index adbf24cee10b0..1e23cf62917fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,6 @@ avg(salary):double | always_false:boolean in -required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 7e7c561fac3a5..734e2ef5e475e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,6 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warningRegex:Line 1:58: evaluation of \[to_datetime\(ul\)\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 3c38bd190b0b1..25b114b5d1daf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -580,7 +580,6 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_capability: enrich_load -required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 537b69547c6be..3505b52e5599e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -99,7 +99,6 @@ int:integer |dbl:double ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change < 1\] failed, treating result as null. Only first 20 failures recorded. 
@@ -115,7 +114,6 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -131,7 +129,6 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -143,7 +140,6 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. @@ -156,7 +152,6 @@ emp_no:integer |salary_change:double ; inMultivalue -required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -169,7 +164,6 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -185,7 +179,6 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -201,7 +194,6 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change == 1.19.*\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index b399734151412..f4b6d41a7a027 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,6 @@ // Integral types-specific tests inLongAndInt -required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warningRegex:evaluation of \[avg_worked_seconds in \(372957040, salary_change.long, 236703986\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ long:long |ul:ul ; convertDoubleToUL -required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -127,7 +125,6 @@ int:integer |long:long ; convertULToLong -required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +167,6 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +186,6 @@ int:integer |ii:integer ; convertLongToInt -required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +202,6 @@ long:long |int:integer ; convertULToInt -required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +233,6 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warningRegex:Line 1:79: evaluation of \[to_integer\(str1\)\] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +247,6 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +257,6 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +272,6 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +287,6 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int == 0\] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +301,6 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int\] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +313,6 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int in \(1, 7\)\] failed, treating result as null. 
Only first 20 failures recorded. @@ -338,7 +325,6 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -354,7 +340,6 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -370,7 +355,6 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int == 1.*\] failed, treating result as null. Only first 20 failures recorded diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 0fb6994ef759f..4418f7e0aa7ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 == ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +59,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 < ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +71,6 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 != ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -125,7 +122,6 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +142,6 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +163,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +182,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warningRegex:evaluation of \[cidr_match\(ip1, \\\"127.0.0.2/32\\\"\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -199,7 +192,6 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +205,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +224,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +236,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -366,7 +355,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -380,7 +368,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index da069836504d4..2fe2feb3bc219 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -214,8 +214,6 @@ height:double | s:double ; powSalarySquared -required_capability: pow_double - from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; salary:integer | s:double @@ -631,8 +629,6 @@ base:double | exponent:integer | result:double ; powIntInt -required_capability: pow_double - ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) ; @@ -642,8 +638,6 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_capability: pow_double - row s = 1 + pow(2, 2); s:double @@ -658,8 +652,6 @@ s:double ; powIntUL -required_capability: pow_double - row x = pow(1, 9223372036854775808); x:double @@ -667,8 +659,6 @@ x:double ; powLongUL -required_capability: pow_double - row x = to_long(1) | eval x = pow(x, 9223372036854775808); x:double @@ -676,8 +666,6 @@ x:double ; powUnsignedLongUL -required_capability: pow_double - row x = to_ul(1) | eval x = pow(x, 9223372036854775808); x:double @@ -701,8 +689,6 @@ null ; powULInt -required_capability: pow_double - row x = pow(to_unsigned_long(9223372036854775807), 1); x:double @@ -710,8 +696,6 @@ x:double ; powULIntOverrun -required_capability: pow_double - ROW x = POW(9223372036854775808, 2) ; @@ -732,8 +716,6 @@ x:double ; powULLong -required_capability: pow_double - row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); x:double @@ -741,8 +723,6 @@ x:double ; powULLongOverrun -required_capability: pow_double - row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); x:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 01e7258e8a6ee..ac9948c90f5e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,6 @@ 
############################################### convertFromStringQuantize -required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 963245f9f0ea6..e103168d2e589 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -390,7 +390,6 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -582,7 +581,6 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions < \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -595,7 +593,6 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[job_positions > \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -612,7 +609,6 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions == \\\"Accountant\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -624,7 +620,6 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions\] failed, treating result as null. Only first 20 failures recorded. @@ -637,7 +632,6 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions in \(\\\"Accountant\\\", \\"Tech Lead\\\"\)\] failed, treating result as null. Only first 20 failures recorded. @@ -650,7 +644,6 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions < \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -667,7 +660,6 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions > \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -680,7 +672,6 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions == \\\"Accountant\\\".*\] failed, treating result as null. Only first 20 failures recorded. 
@@ -937,7 +928,6 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -951,7 +941,6 @@ null | 19 ; startsWithText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -965,7 +954,6 @@ false | null ; substringOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -979,7 +967,6 @@ Gatew | null ; concatOfText -required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. @@ -1518,7 +1505,6 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 03d0b71894d9b..fbddb3d0e6989 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,6 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in >= to_ul\(74330435873664882\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownRange -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in .* to_ul\(.*\)\] failed, treating result as null. Only first 20 failures recorded. @@ -82,7 +80,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownIn -required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -96,7 +93,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsEquality -required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warningRegex:evaluation of \[bytes_in == bytes_out\] failed, treating result as null. Only first 20 failures recorded. 
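The `required_capability: mv_warn` lines deleted throughout these specs were gates on a node feature that every supported version now reports, so the gates are dead code; capabilities that still need gating are expressed as EsqlCapabilities.Cap entries instead, which the CsvTests runner checks by capability name. A grounding example, taken from this patch's own CsvTests hunk further below:

    // From this patch's CsvTests change: a spec declaring "required_capability: enrich_load"
    // is skipped in unit tests because enrich lookups need a real cluster.
    assumeFalse(
        "enrich can't load fields in csv tests",
        testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName())
    );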
@@ -107,7 +103,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsInequality -required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warningRegex:evaluation of \[bytes_in < bytes_out\] failed, treating result as null. Only first 20 failures recorded. @@ -138,7 +133,6 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warningRegex:evaluation of \[bytes_in == to_ul\(154551962150890564\)\] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +143,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toDegrees -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -161,7 +154,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toRadians -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c5d3ee29d0bda..c33acf95aa33f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -140,6 +140,12 @@ public enum Cap { */ CASE_MV, + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + ENRICH_LOAD, + /** * Optimization for ST_CENTROID changed some results in cartesian data. #108713 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 266f07d22eaf5..a347a6947bf67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -16,7 +15,6 @@ import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Collections; -import java.util.Map; import java.util.Set; /** @@ -48,34 +46,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); - /** - * When we added the warnings for multivalued fields emitting {@code null} - * when they touched multivalued fields. Added in #102417. - */ - private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn"); - - /** - * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177. - */ - private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points"); - /** * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. 
*/ private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source"); - /** - * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}. - * Added in ESQL-1183. - */ - private static final NodeFeature CONVERT_WARN = new NodeFeature("esql.convert_warn"); - - /** - * When we flipped the return type of {@code POW} to always return a double. Changed - * in #102183. - */ - private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double"); - /** * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. */ @@ -152,12 +127,6 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); - /** - * Support for loading values over enrich. This is supported by all versions of ESQL but not - * the unit test CsvTests. - */ - public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); - /** * Support for timespan units abbreviations */ @@ -215,16 +184,4 @@ public Set getFeatures() { return features; } } - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), - Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(SPATIAL_POINTS, Version.V_8_12_0), - Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0), - Map.entry(ENRICH_LOAD, Version.V_8_12_0) - ); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java index 985dcf118ac54..4067fc5a4e065 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,8 +33,6 @@ public class TransportEsqlStatsAction extends TransportNodesAction< EsqlStatsResponse.NodeStatsResponse, Void> { - static final NodeFeature ESQL_STATS_FEATURE = new NodeFeature("esql.stats_node"); - // the plan executor holds the metrics private final FeatureService featureService; private final PlanExecutor planExecutor; @@ -63,13 +60,7 @@ public TransportEsqlStatsAction( @Override protected DiscoveryNode[] resolveRequest(EsqlStatsRequest request, ClusterState clusterState) { - if (featureService.clusterHasFeature(clusterState, ESQL_STATS_FEATURE)) { - // use the whole cluster - return super.resolveRequest(request, clusterState); - } else { - // not all nodes in the cluster have upgraded to esql - just use this node for now - return new DiscoveryNode[] { clusterService.localNode() }; - } + return super.resolveRequest(request, clusterState); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ff0c0d5a5d14c..012720db9efd9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -236,7 +236,10 @@ public final void test() throws Throwable { * are tested in integration tests. */ assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); - assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); + assumeFalse( + "enrich can't load fields in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName()) + ); assumeFalse( "can't use match in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.capabilityName()) From 1a4b3d37b5271774b866bbcd5c8eba1907dcbeb2 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:10:43 -0500 Subject: [PATCH 139/386] File-based settings health indicator (#117081) * Add FileSettingsService health indicator * spotless * YELLOW for any failure, plus most_recent_failure --- .../elasticsearch/node/NodeConstruction.java | 14 ++- .../service/FileSettingsService.java | 99 +++++++++++++++++-- .../ingest/ReservedPipelineActionTests.java | 7 +- ...leSettingsHealthIndicatorServiceTests.java | 90 +++++++++++++++++ .../service/FileSettingsServiceTests.java | 32 +++++- 5 files changed, 230 insertions(+), 12 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index e1fc586424dec..2488ac894a612 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -187,6 +187,7 @@ import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.rest.action.search.SearchResponseMetrics; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -1032,10 +1033,12 @@ private void construct( actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), - environment + environment, + fileSettingsHealthIndicatorService ); RestoreService restoreService = new RestoreService( @@ -1129,7 +1132,8 @@ private void construct( featureService, threadPool, telemetryProvider, - repositoriesService + repositoriesService, + fileSettingsHealthIndicatorService ) ); @@ -1301,7 +1305,8 @@ private Module loadDiagnosticServices( FeatureService featureService, ThreadPool threadPool, TelemetryProvider telemetryProvider, - RepositoriesService repositoriesService + RepositoriesService repositoriesService, + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService ) { MasterHistoryService masterHistoryService = 
new MasterHistoryService(transportService, threadPool, clusterService); @@ -1316,7 +1321,8 @@ private Module loadDiagnosticServices( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), new DiskHealthIndicatorService(clusterService, featureService), - new ShardsCapacityHealthIndicatorService(clusterService, featureService) + new ShardsCapacityHealthIndicatorService(clusterService, featureService), + fileSettingsHealthIndicatorService ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index ae9ae6f8b5bf9..5f907572641a6 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -22,14 +22,27 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; import org.elasticsearch.env.Environment; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorImpact; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.BufferedInputStream; import java.io.IOException; import java.nio.file.Files; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.health.ImpactArea.DEPLOYMENT_MANAGEMENT; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; @@ -53,6 +66,7 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement public static final String NAMESPACE = "file_settings"; public static final String OPERATOR_DIRECTORY = "operator"; private final ReservedClusterStateService stateService; + private final FileSettingsHealthIndicatorService healthIndicatorService; /** * Constructs the {@link FileSettingsService} @@ -60,10 +74,21 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement * @param clusterService so we can register ourselves as a cluster state change listener * @param stateService an instance of the immutable cluster state controller, so we can perform the cluster state changes * @param environment we need the environment to pull the location of the config and operator directories + * @param healthIndicatorService tracks the success or failure of file-based settings */ - public FileSettingsService(ClusterService clusterService, ReservedClusterStateService stateService, Environment environment) { + public 
FileSettingsService( + ClusterService clusterService, + ReservedClusterStateService stateService, + Environment environment, + FileSettingsHealthIndicatorService healthIndicatorService + ) { super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; + this.healthIndicatorService = healthIndicatorService; + } + + public FileSettingsHealthIndicatorService healthIndicatorService() { + return healthIndicatorService; } /** @@ -121,6 +146,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_VERSION_ONLY); } @@ -131,6 +157,7 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept @Override protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_OR_SAME_VERSION); } @@ -146,6 +173,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I completion.get(); } + private void completeProcessing(Exception e, PlainActionFuture completion) { + if (e != null) { + healthIndicatorService.failureOccurred(e.toString()); + completion.onFailure(e); + } else { + completion.onResponse(null); + healthIndicatorService.successOccurred(); + } + } + @Override protected void onProcessFileChangesException(Exception e) { if (e instanceof ExecutionException) { @@ -172,11 +209,61 @@ protected void processInitialFileMissing() throws ExecutionException, Interrupte completion.get(); } - private static void completeProcessing(Exception e, PlainActionFuture completion) { - if (e != null) { - completion.onFailure(e); - } else { - completion.onResponse(null); + public static class FileSettingsHealthIndicatorService implements HealthIndicatorService { + static final String NAME = "file_settings"; + static final String NO_CHANGES_SYMPTOM = "No file-based setting changes have occurred"; + static final String SUCCESS_SYMPTOM = "The most recent file-based settings were applied successfully"; + static final String FAILURE_SYMPTOM = "The most recent file-based settings encountered an error"; + + static final List STALE_SETTINGS_IMPACT = List.of( + new HealthIndicatorImpact( + NAME, + "stale", + 3, + "The most recent file-based settings changes have not been applied.", + List.of(DEPLOYMENT_MANAGEMENT) + ) + ); + + private final AtomicLong changeCount = new AtomicLong(0); + private final AtomicLong failureStreak = new AtomicLong(0); + private final AtomicReference mostRecentFailure = new AtomicReference<>(); + + public void changeOccurred() { + changeCount.incrementAndGet(); + } + + public void successOccurred() { + failureStreak.set(0); + } + + public void failureOccurred(String description) { + failureStreak.incrementAndGet(); + mostRecentFailure.set(description); + } + + @Override + public String name() { + return NAME; + } + + @Override + public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + if (0 == changeCount.get()) { + return createIndicator(GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } + long numFailures = 
failureStreak.get(); + if (0 == numFailures) { + return createIndicator(GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } else { + return createIndicator( + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", numFailures, "most_recent_failure", mostRecentFailure.get())), + STALE_SETTINGS_IMPACT, + List.of() + ); + } } } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 0bc5c69d8ad4b..dc1698e3459ec 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -134,7 +134,12 @@ public void setup() { ); fileSettingsService = spy( - new FileSettingsService(clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY)) + new FileSettingsService( + clusterService, + mock(ReservedClusterStateService.class), + newEnvironment(Settings.EMPTY), + new FileSettingsService.FileSettingsHealthIndicatorService() + ) ); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..03d1adff42c4e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.STALE_SETTINGS_IMPACT; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.SUCCESS_SYMPTOM; + +/** + * Here, we test {@link FileSettingsHealthIndicatorService} in isolation; + * we do not test that {@link FileSettingsService} uses it correctly. 
+ */ +public class FileSettingsHealthIndicatorServiceTests extends ESTestCase { + + FileSettingsHealthIndicatorService healthIndicatorService; + + @Before + public void initialize() { + healthIndicatorService = new FileSettingsHealthIndicatorService(); + } + + public void testInitiallyGreen() { + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } + + public void testGreenYellowYellowGreen() { + healthIndicatorService.changeOccurred(); + // This is a strange case: a change occurred, but neither success nor failure have been reported yet. + // While the change is still in progress, we don't change the status. + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie 1"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", "whoopsie 1")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie #2"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 2L, "most_recent_failure", "whoopsie #2")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.successOccurred(); + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 0db29588c4298..ae60a21b6fc22 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,6 +34,7 @@ import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -78,6 +80,8 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; public class FileSettingsServiceTests extends ESTestCase { private static final Logger logger = 
LogManager.getLogger(FileSettingsServiceTests.class); @@ -86,6 +90,7 @@ public class FileSettingsServiceTests extends ESTestCase { private ReservedClusterStateService controller; private ThreadPool threadpool; private FileSettingsService fileSettingsService; + private FileSettingsHealthIndicatorService healthIndicatorService; @Before public void setUp() throws Exception { @@ -131,7 +136,8 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - fileSettingsService = spy(new FileSettingsService(clusterService, controller, env)); + healthIndicatorService = mock(FileSettingsHealthIndicatorService.class); + fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } @After @@ -162,6 +168,7 @@ public void testStartStop() { assertTrue(fileSettingsService.watching()); fileSettingsService.stop(); assertFalse(fileSettingsService.watching()); + verifyNoInteractions(healthIndicatorService); } public void testOperatorDirName() { @@ -208,6 +215,10 @@ public void testInitialFileError() throws Exception { verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalStateException.class.getName()))); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -232,6 +243,10 @@ public void testInitialFileWorks() throws Exception { verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -267,6 +282,10 @@ public void testProcessFileChanges() throws Exception { verify(fileSettingsService, times(1)).processFileChanges(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); + + verify(healthIndicatorService, times(2)).changeOccurred(); + verify(healthIndicatorService, times(2)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -321,6 +340,11 @@ public void testInvalidJSON() throws Exception { // Note: the name "processFileOnServiceStart" is a bit misleading because it is not // referring to fileSettingsService.start(). Rather, it is referring to the initialization // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
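The verify(...) assertions in these tests pin the indicator's bookkeeping: every processed change bumps a counter, each failure extends a streak and records the latest message, and a success clears the streak. Distilled as a sketch (status names are from the patch; the standalone method framing is illustrative, not the service's actual signature):

    // Decision ladder of FileSettingsHealthIndicatorService.calculate(), restated in isolation.
    // changeCount and failureStreak mirror the service's AtomicLong fields.
    static HealthStatus fileSettingsStatus(long changeCount, long failureStreak) {
        if (changeCount == 0) {
            return GREEN;   // NO_CHANGES_SYMPTOM: no file-based settings change seen yet
        }
        if (failureStreak == 0) {
            return GREEN;   // SUCCESS_SYMPTOM: also covers a change still in flight
        }
        return YELLOW;      // FAILURE_SYMPTOM: details carry failure_streak and most_recent_failure
    }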
+
+        verify(healthIndicatorService, times(2)).changeOccurred();
+        verify(healthIndicatorService, times(1)).successOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalArgumentException.class.getName())));
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     private static void awaitOrBust(CyclicBarrier barrier) {
@@ -373,6 +397,12 @@ public void testStopWorksInMiddleOfProcessing() throws Exception {
         fileSettingsService.close();
         // let the deadlocked thread end, so we can cleanly exit the test
         deadThreadLatch.countDown();
+
+        verify(healthIndicatorService, times(1)).changeOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(
+            argThat(s -> s.startsWith(FailedToCommitClusterStateException.class.getName()))
+        );
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     public void testHandleSnapshotRestoreClearsMetadata() throws Exception {

From 5500a5ec6838ea02ab45a14e116deafb3a5f71e6 Mon Sep 17 00:00:00 2001
From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com>
Date: Thu, 21 Nov 2024 09:38:22 -0500
Subject: [PATCH 140/386] [ML] Fix deberta tokenizer bug caused by bug in
 normalizer (#117189)

* Fix deberta tokenizer bug caused by a bug in the normalizer, which caused
  offsets to be negative

* Update docs/changelog/117189.yaml

---
 docs/changelog/117189.yaml                        |  5 +++++
 .../tokenizers/PrecompiledCharMapNormalizer.java  |  2 +-
 .../nlp/tokenizers/DebertaV2TokenizerTests.java   | 14 ++++++++++++++
 3 files changed, 20 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/117189.yaml

diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml
new file mode 100644
index 0000000000000..e89c2d81506d9
--- /dev/null
+++ b/docs/changelog/117189.yaml
@@ -0,0 +1,5 @@
+pr: 117189
+summary: Fix deberta tokenizer bug caused by bug in normalizer
+area: Machine Learning
+type: bug
+issues: []

diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java
index bbe5bea691c35..5dd7dbbffaa61 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java
@@ -194,7 +194,7 @@ Reader normalize(CharSequence str) {
             if (charDelta < 0) {
                 // normalised form is shorter
                 int lastDiff = getLastCumulativeDiff();
-                addOffCorrectMap(normalizedCharPos, lastDiff + charDelta);
+                addOffCorrectMap(normalizedCharPos, lastDiff - charDelta);
             } else if (charDelta > 0) {
                 // inserted chars, add the offset in the output stream
                 int lastDiff = getLastCumulativeDiff();

diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java
index bbe509da67452..a8461de8630ae 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java
@@ -94,6 +94,20 @@ public void testTokenize() throws IOException {
         }
     }
 
+    public void testTokenizeWithHiddenControlCharacters() throws IOException {
+        try (
+            DebertaV2Tokenizer tokenizer = 
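/*
 * A worked example of the one-character normalizer fix above (illustrative numbers, not code
 * from the patch): take the test input "\u009F\u008Fz", where the precompiled map deletes both
 * control characters. At the first surviving normalized position the char delta is -2 and the
 * previous cumulative diff is 0, so the correction entry must map normalized offset 0 back to
 * original offset 2:
 *
 *   old: addOffCorrectMap(0, lastDiff + charDelta)  ->  0 + (-2) = -2   (negative offset)
 *   new: addOffCorrectMap(0, lastDiff - charDelta)  ->  0 - (-2) = +2   (points at 'z')
 *
 * The testTokenizeWithHiddenControlCharacters case being opened here pins exactly this input.
 */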
DebertaV2Tokenizer.builder( + TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new DebertaV2Tokenization(false, false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("\u009F\u008Fz", Tokenization.Truncate.NONE, -1, 0, null).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁", "z")); + + } + } + public void testSurrogatePair() throws IOException { try ( DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( From fa3799f82e628fd086b74f7ac13ed099a036bfd3 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 21 Nov 2024 15:57:10 +0100 Subject: [PATCH 141/386] [Build] Make JdkDownload Plugin configuration cache compatible (#117120) Also Fix zulu 8 download that we still use for testing old es versions --- .../internal/JdkDownloadPluginFuncTest.groovy | 74 +++++++++++------- .../gradle/internal/fake_zulu_macos.tar.gz | Bin 0 -> 2731 bytes .../elasticsearch/gradle/internal/Jdk.java | 7 +- .../gradle/internal/JdkDownloadPlugin.java | 9 +-- 4 files changed, 53 insertions(+), 37 deletions(-) create mode 100644 build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy index 94df02b280ca6..a4635a7232754 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy @@ -9,7 +9,6 @@ package org.elasticsearch.gradle.internal - import spock.lang.Unroll import com.github.tomakehurst.wiremock.WireMockServer @@ -24,8 +23,7 @@ import java.nio.file.Paths import java.util.regex.Matcher import java.util.regex.Pattern -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_ADOPTIUM -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_OPENJDK +import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.* class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { @@ -33,13 +31,11 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static final String ADOPT_JDK_VERSION = "12.0.2+10" private static final String ADOPT_JDK_VERSION_11 = "11.0.10+9" private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7" + private static final String AZUL_JDK_VERSION_8 = "8u302+b08" + private static final String AZUL_8_DISTRO_VERSION = "8.56.0.23" private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)") - def setup() { - configurationCacheCompatible = false - } - @Unroll def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { given: @@ -56,14 +52,16 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { version = '$jdkVersion' platform = "$platform" architecture = '$arch' + distributionVersion = '$distributionVersion' } } - def theJdks = jdks +// def theJdks = jdks tasks.register("getJdk") { dependsOn jdks.myJdk + def jdk = jdks.myJdk doLast { - println "JDK HOME: " + theJdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -78,22 +76,23 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assertExtraction(result.output, expectedJavaBin); where: - platform | arch | 
jdkVendor | jdkVersion | expectedJavaBin | suffix - "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | "bin/java" | "(jdk 11)" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | "bin/java" | "(jdk 15)" + platform | arch | jdkVendor | jdkVersion | distributionVersion | expectedJavaBin | suffix + "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | null | "bin/java" | "(jdk 11)" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | null | "bin/java" | "(jdk 15)" + "darwin" | "aarch64" | VENDOR_ZULU | AZUL_JDK_VERSION_8 | AZUL_8_DISTRO_VERSION | "Contents/Home/bin/java" | "(jdk 8)" } def "transforms are reused across projects"() { @@ -118,9 +117,10 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { } } tasks.register("getJdk") { - dependsOn jdks.myJdk + def jdk = jdks.myJdk + dependsOn jdk doLast { - println "JDK HOME: " + jdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -137,7 +137,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { result.output.count("Unpacking linux-12.0.2-x64.tar.gz using ${SymbolicLinkPreservingUntarTransform.simpleName}") == 1 where: - platform | jdkVendor | jdkVersion | expectedJavaBin + platform | jdkVendor | jdkVersion | 
expectedJavaBin "linux" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" } @@ -159,6 +159,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { vendor = '$VENDOR_ADOPTIUM' version = '$ADOPT_JDK_VERSION' platform = "$platform" + distributionVersion = '$ADOPT_JDK_VERSION' architecture = "x64" } } @@ -204,6 +205,8 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assert matcher.find() == true; String jdkHome = matcher.group(1); Path javaPath = Paths.get(jdkHome, javaBin); + println "canonical " + javaPath.toFile().getCanonicalPath() + Paths.get(jdkHome).toFile().listFiles().each { println it } assert Files.exists(javaPath) == true; true } @@ -221,15 +224,26 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform); return "/java/GA/" + versionPath + "/GPL/" + filename; + } else if (vendor.equals(VENDOR_ZULU)) { + // we only have a single version of zulu currently in the tests + return "/zulu/bin/zulu8.56.0.23-ca-jdk8.0.302-macosx_aarch64.tar.gz" } } private static byte[] filebytes(final String vendor, final String platform) throws IOException { final String effectivePlatform = getPlatform(vendor, platform); if (vendor.equals(VENDOR_ADOPTIUM)) { - return JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_adoptium_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_adoptium_" + effectivePlatform + "." + extension(platform) + ).getBytes() } else if (vendor.equals(VENDOR_OPENJDK)) { - JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_openjdk_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_openjdk_" + effectivePlatform + "." + extension(platform) + ).getBytes() + } else { + // zulu + String resourcePath = "fake_zulu_" + effectivePlatform + "." + extension(platform) + return JdkDownloadPluginFuncTest.class.getResourceAsStream(resourcePath).getBytes() } } diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..87361b67ec76ca35cd2618994c9025bdc1140765 GIT binary patch literal 2731 zcmZvdX*|?x8^_0T7)Mz<6lJZ*5=rCUSG~_>&m~7#cQEEbVvL##7 zlw^!0M)qB08B7#oGUh+`Gd<^d-aYq=>(%|am+SideqY68Btq=F@fCysCh5w*MNleE4ED5jZLq&Bb1Y&AvrsFPy=o%G~fOER-wd#74PD_OZX>2K8$>%Oo zFAwOxle&%9@=-VvH6{f2{Uwh{jwmb~tUtRI@ap}0k?rWj){Re*39B)nB|s#v!reFG zN4Io)D@SayNIubF@t`ezTKU7p2Q}ui=T|Q@v@L{3`*qd&-Xi1so_H6um-YBN`_LI^ z1_QEfaK1C8xDq{e{vu|wucv&YYdDgdaRTVw;83Q)k_Ysu7Lf4YZmrWl%6b%s5pS(6 zWmTDS7YD3SL@Ql%hsoLbTGSb|ElbGEH6B<;vv)6wb?6qQYpzOT3Yz(m3Isx#ipk(a7-@a zrkJSX-vk!Is#>M1RZsc+!K^fj!U2O%(T6r|j-|+)_;_%*mP)v<{j!N6J8sphfHIl1 zlS?R0CpzD&_c4lo{iOR~dQ&2$MQD;ZW=i=vo1A}tbTVmme{y?7UUP=F@x3cqy^bdm zFc>2Zy_;j+^YrI3#jhs)TGzAEI`<|FdDCqVY<*%Ry4Td^b>jVve(Yym&CdQjN2s`Z zEUZ{=bZqipT~4nuKNP6lN#F9#@ub{diSR_L+y4a~=3DIoF(5?mX+bYrShj z!_BmF*2`JDVORrO`g}0^k87C%^?AeeOPMbe4Tk#hu1yUed=AN`Cw=4>6QemK1Q(D=+byFgNnM$hbri*m00dNat^SQPFFHACPTB=yV}>pc8MOAFx++oYH#nmK0zsxjE1o3M{nv=c`v8I9z5s1dSgf9XbVeCy0#`MurTGpi;MKG%b1<+%FDB=zV&-+ zpE{Ti*$XPSpBuilPYL0=(mQY_6tL3-NUs5YzR$eV%GuE6cAi!g+rXs}@)@K! 
z*Fz!&hSMo`RA28-_(^WM(z!M5qCVrht(XRudx0g6Nq!b}^gAS1rYcb1Gh$K&FQT8C zr3O>;e)z8tvlRX_Ra|M@PZR=?i{n*-hZ0vDfk*-Z2!yM|ixS|?GL%2o;B4l>4xtS| zBE~|ny)KR$o!68Pi{CMmZs+r0ufBk-UEe+fBJq-NLr2|i>&(_3#irPa4;@ zJo1mY?S6g>!6%7yx$7mkJ2%4 zc?pTvk;~z*_&bbsl$9U>-1cO?bF1vk4XAuIRZ>+cAycL6P8w0CGk=VA279SiR2Vms zU$ftTwaZ`A&-QMXeN1JvAEgV{Nu^nO7mc1)p_*q>{rb1~EtZok9mh4h&wbE+gi5!x zv$xm&Qsf`(|9t&)Ic>nb{pMY?Own&6ndjZ}dNwfRfqm?tvpRCD?2syjzy+4H{C7K& z(_rQp9v!3~xM2|U{J~$}29uf|&tv4Z$F{ahu{mh8TdFHG-dX(dr8mVvyzCLl(;0QV zZ0RaJH#SrQ7VXgufny?zN%2<(90-ci565Vx^##P^&Y6d4AD0uGhiZrW_KB5}YKnSt zWENb1V_0TB-G}91mSd_Sr&j8zS`ixL%AwO~stPjU`|HxD~fB?%HIYA1=@g? zeqA-xpri!8qiWUPuR}^H>EQgLnt}zVmvc00Yx)4P1LCBszgq=yvufN0lW<(PlOq1x z9z2L!RQyNeW*nYh?S9p69f@DIZQ~^AS#+nEs%sIBoeI?S6YXaYYcTv8sbfV)FlzDp z#B0an+;(0+9ADx#f~sfZS8<_rPzuj?Or&KLu#o6pV%zuQwrNlpVWS;d`){sPtU#GU z6PG|x9i*^sEc`1nUjyrhZc@&^6`^_^V { private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?"); private final String name; - private final Configuration configuration; + private final FileCollection configuration; private final Property vendor; private final Property version; private final Property platform; private final Property architecture; private final Property distributionVersion; + private final String configurationName; private String baseVersion; private String major; private String build; @@ -47,6 +49,7 @@ public class Jdk implements Buildable, Iterable { Jdk(String name, Configuration configuration, ObjectFactory objectFactory) { this.name = name; + this.configurationName = configuration.getName(); this.configuration = configuration; this.vendor = objectFactory.property(String.class); this.version = objectFactory.property(String.class); @@ -137,7 +140,7 @@ public String getPath() { } public String getConfigurationName() { - return configuration.getName(); + return configurationName; } @Override diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index 5b195cad3388f..3c278128e43f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -21,8 +21,6 @@ import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import java.util.Arrays; - /** * @deprecated We wanna get rid from this and custom jdk downloads via this plugin and * make leverage the gradle toolchain resolver capabilities. 
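The Jdk change above captures the configuration's name into a plain String field at construction, for the same reason the functional test now does `def jdk = jdks.myJdk` before `doLast`: the configuration cache can only serialize task state that holds values, not live Project-scoped objects. In plugin code the safe shape looks roughly like this (a sketch assuming a Jdk container registered under the extension name "jdks"; not code from this patch):

    // Configuration-cache-safe task wiring: resolve what the action needs while configuring,
    // so the serialized doLast action captures only 'jdk', never the Project or the container.
    NamedDomainObjectContainer<Jdk> jdks =
        (NamedDomainObjectContainer<Jdk>) project.getExtensions().getByName("jdks");
    Jdk jdk = jdks.getByName("myJdk");                       // looked up at configuration time
    project.getTasks().register("getJdk", task -> {
        task.dependsOn(jdk);                                 // Jdk is Buildable, so this is valid
        task.doLast(t -> System.out.println("JDK HOME: " + jdk));
    });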
@@ -38,8 +36,8 @@ public class JdkDownloadPlugin implements Plugin { private static final String REPO_NAME_PREFIX = "jdk_repo_"; private static final String EXTENSION_NAME = "jdks"; - public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.+).jdk"; - public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*linux_aarch64"; + public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.*).jdk"; + public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*_aarch64"; @Override public void apply(Project project) { @@ -66,7 +64,8 @@ public void apply(Project project) { .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); - parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); + // parameters.setAsFiletreeOutput(true); + // parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); }); }); From b42e8f27499b4171c714670dd5c4ab1aa2dd9476 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Thu, 21 Nov 2024 10:11:20 -0500 Subject: [PATCH 142/386] Updating PivotConfig max_page_search_size deprecation warning to critical (#117051) Co-authored-by: Elastic Machine --- .../xpack/core/transform/transforms/pivot/PivotConfig.java | 2 +- .../core/transform/transforms/TransformConfigTests.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java index 47f7fea8dc199..6e78c2e8d3ef3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java @@ -184,7 +184,7 @@ public void checkForDeprecations(String id, NamedXContentRegistry namedXContentR onDeprecation.accept( // max_page_search_size got deprecated in 7.8, still accepted for 8.x, to be removed in 9.x new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index f1c2de11496bf..8cfecc432c661 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -930,7 +930,7 @@ public void testCheckForDeprecations() { equalTo( Collections.singletonList( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -952,7 +952,7 @@ public void testCheckForDeprecations() { equalTo( List.of( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, 
TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -982,7 +982,7 @@ public void testCheckForDeprecations() { null ), new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, From e7a2a203ffa04e57e21319123a0ecf50340d3710 Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Thu, 21 Nov 2024 11:00:59 -0500 Subject: [PATCH 143/386] Add version prefix to Inference Service API path (#117095) * Add version prefix to EIS API path * Update docs/changelog/117095.yaml --- docs/changelog/117095.yaml | 5 +++++ .../ElasticInferenceServiceSparseEmbeddingsModel.java | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/117095.yaml diff --git a/docs/changelog/117095.yaml b/docs/changelog/117095.yaml new file mode 100644 index 0000000000000..27460924ecb71 --- /dev/null +++ b/docs/changelog/117095.yaml @@ -0,0 +1,5 @@ +pr: 117095 +summary: Add version prefix to Inference Service API path +area: Inference +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index bbbae736dbeb9..731153b3d5dbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -108,6 +108,6 @@ private URI createUri() throws URISyntaxException { default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); + return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath); } } From 041022944e7df963d4a3269dfa6ec7791247251e Mon Sep 17 00:00:00 2001 From: Jason Botzas-Coluni <44372106+jaybcee@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:08:14 -0500 Subject: [PATCH 144/386] Remove all mentions of eis and gateway and deprecate flags that do (#116692) --- docs/changelog/116692.yaml | 5 +++ .../xpack/inference/InferenceCrudIT.java | 12 ++++-- .../xpack/inference/InferencePlugin.java | 41 ++++++++++++++++--- .../ElasticInferenceServiceActionCreator.java | 7 +++- ...ServiceSparseEmbeddingsRequestManager.java | 4 +- ...ServiceSparseEmbeddingsResponseEntity.java | 2 +- .../elastic/ElasticInferenceService.java | 5 ++- .../ElasticInferenceServiceComponents.java | 2 +- .../ElasticInferenceServiceFeature.java | 4 +- .../ElasticInferenceServiceSettings.java | 19 ++++++++- ...InferenceServiceSparseEmbeddingsModel.java | 11 ++++- 11 files changed, 92 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/116692.yaml diff --git a/docs/changelog/116692.yaml b/docs/changelog/116692.yaml new file mode 100644 index 0000000000000..30f9e62095436 --- /dev/null +++ b/docs/changelog/116692.yaml @@ -0,0 +1,5 @@ +pr: 116692 +summary: Remove all mentions of eis and gateway and deprecate flags that do +area: Machine Learning +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 78e064b42bbb2..f5773e73f2b22 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -134,7 +134,8 @@ public void testApisWithoutTaskType() throws IOException { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(18)); } else { assertThat(services.size(), equalTo(17)); @@ -169,7 +170,8 @@ public void testGetServicesWithoutTaskType() throws IOException { "watsonxai" ) ); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(6, "elastic"); } assertArrayEquals(providers, providerList.toArray()); @@ -257,7 +259,8 @@ public void testGetServicesWithCompletionTaskType() throws IOException { public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(5)); } else { assertThat(services.size(), equalTo(4)); @@ -272,7 +275,8 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { Arrays.sort(providers); var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(1, "elastic"); } assertArrayEquals(providers, providerList.toArray()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 62405a2e9f7de..48458bf4f5086 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; @@ -91,7 +93,6 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; @@ -113,6 +114,9 @@ import java.util.stream.Stream; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin, SearchPlugin { @@ -135,11 +139,13 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; + private static final Logger log = LogManager.getLogger(InferencePlugin.class); + private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); - private final SetOnce eisComponents = new SetOnce<>(); + private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; @@ -207,12 +213,35 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - ElasticInferenceServiceSettings eisSettings = new ElasticInferenceServiceSettings(settings); - eisComponents.set(new ElasticInferenceServiceComponents(eisSettings.getEisGatewayUrl())); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. 
Please use {}.", + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } + + if (elasticInferenceUrl != null) { + elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( - () -> List.of(context -> new ElasticInferenceService(httpFactory.get(), serviceComponents.get(), eisComponents.get())) + () -> List.of( + context -> new ElasticInferenceService( + httpFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponents.get() + ) + ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java index c8ada6e535b63..fa096901ed67a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java @@ -15,9 +15,11 @@ import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModel; import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceActionCreator implements ElasticInferenceServiceActionVisitor { @@ -36,7 +38,10 @@ public ElasticInferenceServiceActionCreator(Sender sender, ServiceComponents ser @Override public ExecutableAction create(ElasticInferenceServiceSparseEmbeddingsModel model) { var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext); - var errorMessage = constructFailedToSendRequestMessage(model.uri(), "Elastic Inference Service sparse embeddings"); + var errorMessage = constructFailedToSendRequestMessage( + model.uri(), + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER) + ); return new SenderExecutableAction(sender, requestManager, errorMessage); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java index e7ee41525f07d..bf3409888aaf8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java @@ -22,9 +22,11 @@ import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static 
org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends ElasticInferenceServiceRequestManager { @@ -40,7 +42,7 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends Elast private static ResponseHandler createSparseEmbeddingsHandler() { return new ElasticInferenceServiceResponseHandler( - "Elastic Inference Service sparse embeddings", + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), ElasticInferenceServiceSparseEmbeddingsResponseEntity::fromResponse ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java index 2b36cc5d22cd4..42ca45f75a9c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java @@ -33,7 +33,7 @@ public class ElasticInferenceServiceSparseEmbeddingsResponseEntity { "Failed to find required field [%s] in Elastic Inference Service embeddings response"; /** - * Parses the EIS json response. + * Parses the Elastic Inference Service json response. * * For a request like: * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 98429ed3d001d..e7ce5903163d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -62,6 +62,7 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; + public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; @@ -70,10 +71,10 @@ public class ElasticInferenceService extends SenderService { public ElasticInferenceService( HttpRequestSender.Factory factory, ServiceComponents serviceComponents, - ElasticInferenceServiceComponents eisComponents + ElasticInferenceServiceComponents elasticInferenceServiceComponents ) { super(factory, serviceComponents); - this.elasticInferenceServiceComponents = eisComponents; + this.elasticInferenceServiceComponents = elasticInferenceServiceComponents; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java index 4386964e927d2..c5b2cb693df13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java @@ -7,4 +7,4 @@ package org.elasticsearch.xpack.inference.services.elastic; -public record ElasticInferenceServiceComponents(String eisGatewayUrl) {} +public record ElasticInferenceServiceComponents(String elasticInferenceServiceUrl) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index b0fb6d14ee6f7..324c20d0e48bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -15,6 +15,8 @@ */ public class ElasticInferenceServiceFeature { - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + @Deprecated + public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 8525710c6cf23..bc2daddc2a346 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -14,20 +14,37 @@ public class ElasticInferenceServiceSettings { + @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( + "xpack.inference.elastic.url", + Setting.Property.NodeScope + ); + // Adjust this variable to be volatile, if the setting can be updated at some point in time + @Deprecated private final String eisGatewayUrl; + private final String elasticInferenceServiceUrl; + public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); + elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); + } public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL); + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } + @Deprecated public String getEisGatewayUrl() { return eisGatewayUrl; } + + public String getElasticInferenceServiceUrl() { + return elasticInferenceServiceUrl; + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 731153b3d5dbc..cc69df86933de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -22,8 +22,11 @@ import java.net.URI; import java.net.URISyntaxException; +import java.util.Locale; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; + public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceModel { private final URI uri; @@ -105,9 +108,13 @@ private URI createUri() throws URISyntaxException { switch (modelId) { case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2"; - default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); + default -> throw new IllegalArgumentException( + String.format(Locale.ROOT, "Unsupported model for %s [%s]", ELASTIC_INFERENCE_SERVICE_IDENTIFIER, modelId) + ); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath); + return new URI( + elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath + ); } } From 35116c3b78208ba9f17fc7551b6e8882b564fd4b Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Nov 2024 13:24:22 -0500 Subject: [PATCH 145/386] ESQL: Fix a bug in LuceneQueryExpressionEvaluator (#117252) * ESQL: Fix a bug in LuceneQueryExpressionEvaluator This fixes a Lucene usage bug in `LuceneQueryExpressionEvaluator`, the evaluator we plan to use to run things like `MATCH` when we *can't* push it to a source operator. That'll be useful for things like: ``` FROM foo | STATS COUNT(), COUNT() WHERE MATCH(message, "error") ``` Explanation: When using Lucene's `Scorer` and `BulkScorer` you must stay on the same thread. It's a rule. Most of the time nothing bad happens if you shift threads, but sometimes things explode and Lucene doesn't work. Driver can shift from one thread to another - that's just how it's designed. It's a "yield after running a while" kind of thing. In tests we sometimes get a version of the `Scorer` and `BulkScorer` that asserts that you don't shift threads. That is what caused this test failure. Anyway! This builds protection into `LuceneQueryExpressionEvaluator` so that if it *does* shift threads then it'll rebuild the `Scorer` and `BulkScorer`. That makes the test happy and makes even the most grumpy Lucene object happy.
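The guard is small enough to show inline. Distilled from the `initScorer` change in the diff below (a simplified sketch — the real code also tracks a `noMatch` flag and applies the same thread-tracking pattern to the `BulkScorer` in `scoreDense`):

```java
// Rebuild the Scorer whenever the Driver has migrated to another thread,
// since a Lucene Scorer must only be used on the thread that created it.
private void initScorer(int minDocId) throws IOException {
    if (scorer == null                            // scorer not initialized
        || scorerThread != Thread.currentThread() // scorer built on a different thread
        || scorer.iterator().docID() > minDocId   // previous block came "after" this one
    ) {
        scorerThread = Thread.currentThread();
        scorer = weight.scorer(ctx);              // may be null when nothing matches
    }
}
```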
Closes #116879 --- muted-tests.yml | 3 --- .../LuceneQueryExpressionEvaluator.java | 23 ++++++++++++++++--- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f8ab532dcaa94..d1e1976262f55 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -223,9 +223,6 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests - method: testTermQuery - issue: https://github.com/elastic/elasticsearch/issues/116879 - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index dcd13671670d8..d7d9da052a962 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -191,18 +191,29 @@ SegmentState segmentState(int segment) throws IOException { private class SegmentState { private final Weight weight; private final LeafReaderContext ctx; + /** * Lazily initialed {@link Scorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private Scorer scorer; + /** + * Thread that initialized the {@link #scorer}. + */ + private Thread scorerThread; + /** * Lazily initialed {@link BulkScorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private BulkScorer bulkScorer; + /** + * Thread that initialized the {@link #bulkScorer}. + */ + private Thread bulkScorerThread; + /** * Set to {@code true} if, in the process of building a {@link Scorer} or {@link BulkScorer}, * the {@link Weight} tells us there aren't any matches. @@ -223,7 +234,10 @@ BooleanVector scoreDense(int min, int max) throws IOException { if (noMatch) { return blockFactory.newConstantBooleanVector(false, length); } - if (bulkScorer == null) { + if (bulkScorer == null || // The bulkScorer wasn't initialized + Thread.currentThread() != bulkScorerThread // The bulkScorer was initialized on a different thread + ) { + bulkScorerThread = Thread.currentThread(); bulkScorer = weight.bulkScorer(ctx); if (bulkScorer == null) { noMatch = true; @@ -257,8 +271,11 @@ private void initScorer(int minDocId) throws IOException { if (noMatch) { return; } - if (scorer == null || scorer.iterator().docID() > minDocId) { - // The previous block might have been beyond this one, reset the scorer and try again. 
+ if (scorer == null || // Scorer not initialized + scorerThread != Thread.currentThread() || // Scorer initialized on a different thread + scorer.iterator().docID() > minDocId // The previous block came "after" this one + ) { + scorerThread = Thread.currentThread(); scorer = weight.scorer(ctx); if (scorer == null) { noMatch = true; From 97bafb96aa3606c7d3b47c60ec42d571a20f7968 Mon Sep 17 00:00:00 2001 From: Brendan Cully Date: Thu, 21 Nov 2024 10:41:15 -0800 Subject: [PATCH 146/386] Wait for cluster to be green in 40_get_secrets (#117217) Now that fast refresh searches go to search nodes instead of index nodes, the YAML test for `fleet.get_secret` sometimes fails if the shard isn't available yet on the search node before the request arrives. Addresses elasticsearch-serverless #3159, which can unmute the test once this is available. --- .../resources/rest-api-spec/test/fleet/40_secrets_get.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml index e74283bc873e3..ab150e41f310a 100644 --- a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml +++ b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml @@ -3,6 +3,12 @@ fleet.post_secret: body: '{"value": "test secret"}' - set: { id: id } + # search node needs to be available for fleet.get_secret to work in stateless. + # The `.fleet-secrets` index is created on demand, and its search replica starts out unassigned, + # so wait_for_no_uninitialized_shards can miss it. + - do: + cluster.health: + wait_for_active_shards: all - do: fleet.get_secret: id: $id From 7aa07f1a2a34c2bdf73863f51485616d598bc7e7 Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Thu, 21 Nov 2024 14:04:42 -0500 Subject: [PATCH 147/386] Explicit HTTP content copy/retain (#116115) --- .../forbidden/es-server-signatures.txt | 2 + docs/changelog/116115.yaml | 5 + .../Netty4IncrementalRequestHandlingIT.java | 5 - .../netty4/Netty4TrashingAllocatorIT.java | 122 ++ .../http/netty4/Netty4HttpRequest.java | 34 - .../transport/netty4/Netty4Utils.java | 2 +- .../transport/netty4/NettyAllocator.java | 113 +- .../transport/netty4/WrappedByteBuf.java | 1036 +++++++++++++++++ .../transport/netty4/NettyAllocatorTests.java | 106 ++ .../common/bytes/BytesReference.java | 23 + .../java/org/elasticsearch/http/HttpBody.java | 9 +- .../org/elasticsearch/http/HttpRequest.java | 6 - .../org/elasticsearch/http/HttpTracer.java | 2 +- .../elasticsearch/rest/BaseRestHandler.java | 9 +- .../elasticsearch/rest/FilterRestHandler.java | 5 - .../elasticsearch/rest/RestController.java | 4 - .../org/elasticsearch/rest/RestHandler.java | 12 - .../org/elasticsearch/rest/RestRequest.java | 61 +- .../elasticsearch/rest/RestRequestFilter.java | 7 +- .../rest/action/document/RestBulkAction.java | 16 +- .../rest/action/document/RestIndexAction.java | 19 +- .../rest/action/search/RestSearchAction.java | 4 - .../common/bytes/BytesArrayTests.java | 6 + .../elasticsearch/http/TestHttpRequest.java | 5 - .../rest/RestControllerTests.java | 5 - .../test/rest/FakeRestRequest.java | 5 - .../EnterpriseSearchBaseRestHandler.java | 2 +- .../logstash/rest/RestPutPipelineAction.java | 2 +- .../xpack/security/audit/AuditUtil.java | 5 +- .../rest/action/SecurityBaseRestHandler.java | 2 +- .../audit/logfile/LoggingAuditTrailTests.java 
| 8 +- 31 files changed, 1505 insertions(+), 137 deletions(-) create mode 100644 docs/changelog/116115.yaml create mode 100644 modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index a9da7995c2b36..68b97050ea012 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -167,3 +167,5 @@ org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#(java.lang.Str @defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug java.lang.Thread#(java.lang.Runnable) java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) + +org.elasticsearch.common.bytes.BytesReference#copyBytes(org.elasticsearch.common.bytes.BytesReference) @ This method is a subject for removal. Copying bytes is prone to performance regressions and unnecessary allocations. diff --git a/docs/changelog/116115.yaml b/docs/changelog/116115.yaml new file mode 100644 index 0000000000000..33e1735c20ca4 --- /dev/null +++ b/docs/changelog/116115.yaml @@ -0,0 +1,5 @@ +pr: 116115 +summary: Allow http unsafe buffers by default +area: Network +type: enhancement +issues: [] diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 3095139ca4685..4bb27af4bd0f5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -699,11 +699,6 @@ public Collection getRestHandlers( Predicate clusterSupportsFeature ) { return List.of(new BaseRestHandler() { - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public String getName() { return ROUTE; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java new file mode 100644 index 0000000000000..18c91068ff4f9 --- /dev/null +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.HttpResponseStatus; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class Netty4TrashingAllocatorIT extends ESNetty4IntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.concatLists(List.of(Handler.class), super.nodePlugins()); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testTrashContent() throws InterruptedException { + try (var client = new Netty4HttpClient()) { + var addr = randomFrom(internalCluster().getInstance(HttpServerTransport.class).boundAddress().boundAddresses()).address(); + var content = randomAlphaOfLength(between(1024, 2048)); + var responses = client.post(addr, List.of(new Tuple<>(Handler.ROUTE, content))); + assertEquals(HttpResponseStatus.OK, responses.stream().findFirst().get().status()); + } + } + + public static class Handler extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/trashing-alloc"; + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return ROUTE; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + var content = request.releasableContent(); + var iter = content.iterator(); + return (chan) -> { + request.getHttpRequest().release(); + assertFalse(content.hasReferences()); + BytesRef br; + while ((br = iter.next()) != null) { + for (int i = br.offset; i < br.offset + br.length; i++) { + if (br.bytes[i] != 0) { + fail( + new AssertionError( + "buffer is not trashed, off=" + + br.offset + + " len=" + + br.length + + " pos=" + + i + + " ind=" + + (i - br.offset) + ) + ); + } + } + } + chan.sendResponse(new RestResponse(RestStatus.OK, 
"")); + }; + } + }); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index a1aa211814520..2662ddf7e1440 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.http.netty4; -import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.EmptyHttpHeaders; @@ -128,39 +127,6 @@ public void release() { } } - @Override - public HttpRequest releaseAndCopy() { - assert released.get() == false; - if (pooled == false) { - return this; - } - try { - final ByteBuf copiedContent = Unpooled.copiedBuffer(request.content()); - HttpBody newContent; - if (content.isStream()) { - newContent = content; - } else { - newContent = Netty4Utils.fullHttpBodyFrom(copiedContent); - } - return new Netty4HttpRequest( - sequence, - new DefaultFullHttpRequest( - request.protocolVersion(), - request.method(), - request.uri(), - copiedContent, - request.headers(), - request.trailingHeaders() - ), - new AtomicBoolean(false), - false, - newContent - ); - } finally { - release(); - } - } - @Override public final Map> getHeaders() { return headers; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 459b6c77be8c3..81b4fd3fbb9ee 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -179,7 +179,7 @@ public boolean hasReferences() { } public static HttpBody.Full fullHttpBodyFrom(final ByteBuf buf) { - return new HttpBody.ByteRefHttpBody(toBytesReference(buf)); + return new HttpBody.ByteRefHttpBody(toReleasableBytesReference(buf)); } public static Recycler createRecycler(Settings settings) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index ab38b5f0c4c8c..1eb7e13889338 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -24,9 +24,11 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Booleans; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; public class NettyAllocator { @@ -44,8 +46,9 @@ public class NettyAllocator { private static final String USE_NETTY_DEFAULT_CHUNK = "es.unsafe.use_netty_default_chunk_and_page_size"; static { + ByteBufAllocator allocator; if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) { - ALLOCATOR = ByteBufAllocator.DEFAULT; + allocator = ByteBufAllocator.DEFAULT; SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + 
ByteSizeValue.ofBytes(SUGGESTED_MAX_ALLOCATION_SIZE) @@ -127,7 +130,12 @@ public class NettyAllocator { + g1gcRegionSize + "}]"; } - ALLOCATOR = new NoDirectBuffers(delegate); + allocator = new NoDirectBuffers(delegate); + } + if (Assertions.ENABLED) { + ALLOCATOR = new TrashingByteBufAllocator(allocator); + } else { + ALLOCATOR = allocator; } RECYCLER = new Recycler<>() { @@ -353,4 +361,105 @@ public ByteBufAllocator getDelegate() { return delegate; } } + + static class TrashingByteBuf extends WrappedByteBuf { + + private boolean trashed = false; + + protected TrashingByteBuf(ByteBuf buf) { + super(buf); + } + + @Override + public boolean release() { + if (refCnt() == 1) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(); + } + + @Override + public boolean release(int decrement) { + if (refCnt() == decrement && refCnt() > 0) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(decrement); + } + + // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref + // count to zero, which looks racy because in principle a concurrent caller could come along + // and successfully retain() this buffer to keep it alive after it's been trashed. Such a + // caller would sometimes get an IllegalReferenceCountException ofc but that's something it + // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. + // Yet in practice this should never happen, we only ever retain() these buffers while we + // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd + // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose + // contents we've already decided to trash. 
+ private void trashContent() { + if (trashed == false) { + trashed = true; + TrashingByteBufAllocator.trashBuffer(buf); + } + } + } + + static class TrashingCompositeByteBuf extends CompositeByteBuf { + + TrashingCompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { + super(alloc, direct, maxNumComponents); + } + + @Override + protected void deallocate() { + TrashingByteBufAllocator.trashBuffer(this); + super.deallocate(); + } + } + + static class TrashingByteBufAllocator extends NoDirectBuffers { + + static int DEFAULT_MAX_COMPONENTS = 16; + + static void trashBuffer(ByteBuf buf) { + for (var nioBuf : buf.nioBuffers()) { + if (nioBuf.hasArray()) { + var from = nioBuf.arrayOffset() + nioBuf.position(); + var to = from + nioBuf.remaining(); + Arrays.fill(nioBuf.array(), from, to, (byte) 0); + } + } + } + + TrashingByteBufAllocator(ByteBufAllocator delegate) { + super(delegate); + } + + @Override + public ByteBuf heapBuffer() { + return new TrashingByteBuf(super.heapBuffer()); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity)); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity, int maxCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity, maxCapacity)); + } + + @Override + public CompositeByteBuf compositeHeapBuffer() { + return new TrashingCompositeByteBuf(this, false, DEFAULT_MAX_COMPONENTS); + } + + @Override + public CompositeByteBuf compositeHeapBuffer(int maxNumComponents) { + return new TrashingCompositeByteBuf(this, false, maxNumComponents); + } + + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java new file mode 100644 index 0000000000000..50841cec000f1 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java @@ -0,0 +1,1036 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.util.ByteProcessor; +import io.netty.util.internal.ObjectUtil; +import io.netty.util.internal.StringUtil; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.FileChannel; +import java.nio.channels.GatheringByteChannel; +import java.nio.channels.ScatteringByteChannel; +import java.nio.charset.Charset; + +/** + * A copy of Netty's WrappedByteBuf. 
+ */ +class WrappedByteBuf extends ByteBuf { + + protected final ByteBuf buf; + + protected WrappedByteBuf(ByteBuf buf) { + this.buf = ObjectUtil.checkNotNull(buf, "buf"); + } + + @Override + public final boolean hasMemoryAddress() { + return buf.hasMemoryAddress(); + } + + @Override + public boolean isContiguous() { + return buf.isContiguous(); + } + + @Override + public final long memoryAddress() { + return buf.memoryAddress(); + } + + @Override + public final int capacity() { + return buf.capacity(); + } + + @Override + public ByteBuf capacity(int newCapacity) { + buf.capacity(newCapacity); + return this; + } + + @Override + public final int maxCapacity() { + return buf.maxCapacity(); + } + + @Override + public final ByteBufAllocator alloc() { + return buf.alloc(); + } + + @Override + public final ByteOrder order() { + return buf.order(); + } + + @Override + public ByteBuf order(ByteOrder endianness) { + return buf.order(endianness); + } + + @Override + public final ByteBuf unwrap() { + return buf; + } + + @Override + public ByteBuf asReadOnly() { + return buf.asReadOnly(); + } + + @Override + public boolean isReadOnly() { + return buf.isReadOnly(); + } + + @Override + public final boolean isDirect() { + return buf.isDirect(); + } + + @Override + public final int readerIndex() { + return buf.readerIndex(); + } + + @Override + public final ByteBuf readerIndex(int readerIndex) { + buf.readerIndex(readerIndex); + return this; + } + + @Override + public final int writerIndex() { + return buf.writerIndex(); + } + + @Override + public final ByteBuf writerIndex(int writerIndex) { + buf.writerIndex(writerIndex); + return this; + } + + @Override + public ByteBuf setIndex(int readerIndex, int writerIndex) { + buf.setIndex(readerIndex, writerIndex); + return this; + } + + @Override + public final int readableBytes() { + return buf.readableBytes(); + } + + @Override + public final int writableBytes() { + return buf.writableBytes(); + } + + @Override + public final int maxWritableBytes() { + return buf.maxWritableBytes(); + } + + @Override + public int maxFastWritableBytes() { + return buf.maxFastWritableBytes(); + } + + @Override + public final boolean isReadable() { + return buf.isReadable(); + } + + @Override + public final boolean isWritable() { + return buf.isWritable(); + } + + @Override + public final ByteBuf clear() { + buf.clear(); + return this; + } + + @Override + public final ByteBuf markReaderIndex() { + buf.markReaderIndex(); + return this; + } + + @Override + public final ByteBuf resetReaderIndex() { + buf.resetReaderIndex(); + return this; + } + + @Override + public final ByteBuf markWriterIndex() { + buf.markWriterIndex(); + return this; + } + + @Override + public final ByteBuf resetWriterIndex() { + buf.resetWriterIndex(); + return this; + } + + @Override + public ByteBuf discardReadBytes() { + buf.discardReadBytes(); + return this; + } + + @Override + public ByteBuf discardSomeReadBytes() { + buf.discardSomeReadBytes(); + return this; + } + + @Override + public ByteBuf ensureWritable(int minWritableBytes) { + buf.ensureWritable(minWritableBytes); + return this; + } + + @Override + public int ensureWritable(int minWritableBytes, boolean force) { + return buf.ensureWritable(minWritableBytes, force); + } + + @Override + public boolean getBoolean(int index) { + return buf.getBoolean(index); + } + + @Override + public byte getByte(int index) { + return buf.getByte(index); + } + + @Override + public short getUnsignedByte(int index) { + return buf.getUnsignedByte(index); + } + + 
@Override + public short getShort(int index) { + return buf.getShort(index); + } + + @Override + public short getShortLE(int index) { + return buf.getShortLE(index); + } + + @Override + public int getUnsignedShort(int index) { + return buf.getUnsignedShort(index); + } + + @Override + public int getUnsignedShortLE(int index) { + return buf.getUnsignedShortLE(index); + } + + @Override + public int getMedium(int index) { + return buf.getMedium(index); + } + + @Override + public int getMediumLE(int index) { + return buf.getMediumLE(index); + } + + @Override + public int getUnsignedMedium(int index) { + return buf.getUnsignedMedium(index); + } + + @Override + public int getUnsignedMediumLE(int index) { + return buf.getUnsignedMediumLE(index); + } + + @Override + public int getInt(int index) { + return buf.getInt(index); + } + + @Override + public int getIntLE(int index) { + return buf.getIntLE(index); + } + + @Override + public long getUnsignedInt(int index) { + return buf.getUnsignedInt(index); + } + + @Override + public long getUnsignedIntLE(int index) { + return buf.getUnsignedIntLE(index); + } + + @Override + public long getLong(int index) { + return buf.getLong(index); + } + + @Override + public long getLongLE(int index) { + return buf.getLongLE(index); + } + + @Override + public char getChar(int index) { + return buf.getChar(index); + } + + @Override + public float getFloat(int index) { + return buf.getFloat(index); + } + + @Override + public double getDouble(int index) { + return buf.getDouble(index); + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int length) { + buf.getBytes(index, dst, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuffer dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException { + buf.getBytes(index, out, length); + return this; + } + + @Override + public int getBytes(int index, GatheringByteChannel out, int length) throws IOException { + return buf.getBytes(index, out, length); + } + + @Override + public int getBytes(int index, FileChannel out, long position, int length) throws IOException { + return buf.getBytes(index, out, position, length); + } + + @Override + public CharSequence getCharSequence(int index, int length, Charset charset) { + return buf.getCharSequence(index, length, charset); + } + + @Override + public ByteBuf setBoolean(int index, boolean value) { + buf.setBoolean(index, value); + return this; + } + + @Override + public ByteBuf setByte(int index, int value) { + buf.setByte(index, value); + return this; + } + + @Override + public ByteBuf setShort(int index, int value) { + buf.setShort(index, value); + return this; + } + + @Override + public ByteBuf setShortLE(int index, int value) { + buf.setShortLE(index, value); + return this; + } + + @Override + public ByteBuf setMedium(int index, int value) { + buf.setMedium(index, value); + return this; + } + + @Override + public 
ByteBuf setMediumLE(int index, int value) { + buf.setMediumLE(index, value); + return this; + } + + @Override + public ByteBuf setInt(int index, int value) { + buf.setInt(index, value); + return this; + } + + @Override + public ByteBuf setIntLE(int index, int value) { + buf.setIntLE(index, value); + return this; + } + + @Override + public ByteBuf setLong(int index, long value) { + buf.setLong(index, value); + return this; + } + + @Override + public ByteBuf setLongLE(int index, long value) { + buf.setLongLE(index, value); + return this; + } + + @Override + public ByteBuf setChar(int index, int value) { + buf.setChar(index, value); + return this; + } + + @Override + public ByteBuf setFloat(int index, float value) { + buf.setFloat(index, value); + return this; + } + + @Override + public ByteBuf setDouble(int index, double value) { + buf.setDouble(index, value); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int length) { + buf.setBytes(index, src, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuffer src) { + buf.setBytes(index, src); + return this; + } + + @Override + public int setBytes(int index, InputStream in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, FileChannel in, long position, int length) throws IOException { + return buf.setBytes(index, in, position, length); + } + + @Override + public ByteBuf setZero(int index, int length) { + buf.setZero(index, length); + return this; + } + + @Override + public int setCharSequence(int index, CharSequence sequence, Charset charset) { + return buf.setCharSequence(index, sequence, charset); + } + + @Override + public boolean readBoolean() { + return buf.readBoolean(); + } + + @Override + public byte readByte() { + return buf.readByte(); + } + + @Override + public short readUnsignedByte() { + return buf.readUnsignedByte(); + } + + @Override + public short readShort() { + return buf.readShort(); + } + + @Override + public short readShortLE() { + return buf.readShortLE(); + } + + @Override + public int readUnsignedShort() { + return buf.readUnsignedShort(); + } + + @Override + public int readUnsignedShortLE() { + return buf.readUnsignedShortLE(); + } + + @Override + public int readMedium() { + return buf.readMedium(); + } + + @Override + public int readMediumLE() { + return buf.readMediumLE(); + } + + @Override + public int readUnsignedMedium() { + return buf.readUnsignedMedium(); + } + + @Override + public int readUnsignedMediumLE() { + return buf.readUnsignedMediumLE(); + } + + @Override + public int readInt() { + return buf.readInt(); + } + + @Override + public int readIntLE() { + return buf.readIntLE(); + } + + @Override + public long readUnsignedInt() { + return buf.readUnsignedInt(); + } + + @Override + public long 
readUnsignedIntLE() { + return buf.readUnsignedIntLE(); + } + + @Override + public long readLong() { + return buf.readLong(); + } + + @Override + public long readLongLE() { + return buf.readLongLE(); + } + + @Override + public char readChar() { + return buf.readChar(); + } + + @Override + public float readFloat() { + return buf.readFloat(); + } + + @Override + public double readDouble() { + return buf.readDouble(); + } + + @Override + public ByteBuf readBytes(int length) { + return buf.readBytes(length); + } + + @Override + public ByteBuf readSlice(int length) { + return buf.readSlice(length); + } + + @Override + public ByteBuf readRetainedSlice(int length) { + return buf.readRetainedSlice(length); + } + + @Override + public ByteBuf readBytes(ByteBuf dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int length) { + buf.readBytes(dst, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuffer dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(OutputStream out, int length) throws IOException { + buf.readBytes(out, length); + return this; + } + + @Override + public int readBytes(GatheringByteChannel out, int length) throws IOException { + return buf.readBytes(out, length); + } + + @Override + public int readBytes(FileChannel out, long position, int length) throws IOException { + return buf.readBytes(out, position, length); + } + + @Override + public CharSequence readCharSequence(int length, Charset charset) { + return buf.readCharSequence(length, charset); + } + + @Override + public ByteBuf skipBytes(int length) { + buf.skipBytes(length); + return this; + } + + @Override + public ByteBuf writeBoolean(boolean value) { + buf.writeBoolean(value); + return this; + } + + @Override + public ByteBuf writeByte(int value) { + buf.writeByte(value); + return this; + } + + @Override + public ByteBuf writeShort(int value) { + buf.writeShort(value); + return this; + } + + @Override + public ByteBuf writeShortLE(int value) { + buf.writeShortLE(value); + return this; + } + + @Override + public ByteBuf writeMedium(int value) { + buf.writeMedium(value); + return this; + } + + @Override + public ByteBuf writeMediumLE(int value) { + buf.writeMediumLE(value); + return this; + } + + @Override + public ByteBuf writeInt(int value) { + buf.writeInt(value); + return this; + } + + @Override + public ByteBuf writeIntLE(int value) { + buf.writeIntLE(value); + return this; + } + + @Override + public ByteBuf writeLong(long value) { + buf.writeLong(value); + return this; + } + + @Override + public ByteBuf writeLongLE(long value) { + buf.writeLongLE(value); + return this; + } + + @Override + public ByteBuf writeChar(int value) { + buf.writeChar(value); + return this; + } + + @Override + public ByteBuf writeFloat(float value) { + buf.writeFloat(value); + return this; + } + + @Override + public ByteBuf writeDouble(double value) { + buf.writeDouble(value); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int length) { + 
buf.writeBytes(src, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuffer src) { + buf.writeBytes(src); + return this; + } + + @Override + public int writeBytes(InputStream in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(ScatteringByteChannel in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(FileChannel in, long position, int length) throws IOException { + return buf.writeBytes(in, position, length); + } + + @Override + public ByteBuf writeZero(int length) { + buf.writeZero(length); + return this; + } + + @Override + public int writeCharSequence(CharSequence sequence, Charset charset) { + return buf.writeCharSequence(sequence, charset); + } + + @Override + public int indexOf(int fromIndex, int toIndex, byte value) { + return buf.indexOf(fromIndex, toIndex, value); + } + + @Override + public int bytesBefore(byte value) { + return buf.bytesBefore(value); + } + + @Override + public int bytesBefore(int length, byte value) { + return buf.bytesBefore(length, value); + } + + @Override + public int bytesBefore(int index, int length, byte value) { + return buf.bytesBefore(index, length, value); + } + + @Override + public int forEachByte(ByteProcessor processor) { + return buf.forEachByte(processor); + } + + @Override + public int forEachByte(int index, int length, ByteProcessor processor) { + return buf.forEachByte(index, length, processor); + } + + @Override + public int forEachByteDesc(ByteProcessor processor) { + return buf.forEachByteDesc(processor); + } + + @Override + public int forEachByteDesc(int index, int length, ByteProcessor processor) { + return buf.forEachByteDesc(index, length, processor); + } + + @Override + public ByteBuf copy() { + return buf.copy(); + } + + @Override + public ByteBuf copy(int index, int length) { + return buf.copy(index, length); + } + + @Override + public ByteBuf slice() { + return buf.slice(); + } + + @Override + public ByteBuf retainedSlice() { + return buf.retainedSlice(); + } + + @Override + public ByteBuf slice(int index, int length) { + return buf.slice(index, length); + } + + @Override + public ByteBuf retainedSlice(int index, int length) { + return buf.retainedSlice(index, length); + } + + @Override + public ByteBuf duplicate() { + return buf.duplicate(); + } + + @Override + public ByteBuf retainedDuplicate() { + return buf.retainedDuplicate(); + } + + @Override + public int nioBufferCount() { + return buf.nioBufferCount(); + } + + @Override + public ByteBuffer nioBuffer() { + return buf.nioBuffer(); + } + + @Override + public ByteBuffer nioBuffer(int index, int length) { + return buf.nioBuffer(index, length); + } + + @Override + public ByteBuffer[] nioBuffers() { + return buf.nioBuffers(); + } + + @Override + public ByteBuffer[] nioBuffers(int index, int length) { + return buf.nioBuffers(index, length); + } + + @Override + public ByteBuffer internalNioBuffer(int index, int length) { + return buf.internalNioBuffer(index, length); + } + + @Override + public boolean hasArray() { + return buf.hasArray(); + } + + @Override + 
public byte[] array() { + return buf.array(); + } + + @Override + public int arrayOffset() { + return buf.arrayOffset(); + } + + @Override + public String toString(Charset charset) { + return buf.toString(charset); + } + + @Override + public String toString(int index, int length, Charset charset) { + return buf.toString(index, length, charset); + } + + @Override + public int hashCode() { + return buf.hashCode(); + } + + @Override + @SuppressWarnings("EqualsWhichDoesntCheckParameterClass") + public boolean equals(Object obj) { + return buf.equals(obj); + } + + @Override + public int compareTo(ByteBuf buffer) { + return buf.compareTo(buffer); + } + + @Override + public String toString() { + return StringUtil.simpleClassName(this) + '(' + buf.toString() + ')'; + } + + @Override + public ByteBuf retain(int increment) { + buf.retain(increment); + return this; + } + + @Override + public ByteBuf retain() { + buf.retain(); + return this; + } + + @Override + public ByteBuf touch() { + buf.touch(); + return this; + } + + @Override + public ByteBuf touch(Object hint) { + buf.touch(hint); + return this; + } + + @Override + public final boolean isReadable(int size) { + return buf.isReadable(size); + } + + @Override + public final boolean isWritable(int size) { + return buf.isWritable(size); + } + + @Override + public final int refCnt() { + return buf.refCnt(); + } + + @Override + public boolean release() { + return buf.release(); + } + + @Override + public boolean release(int decrement) { + return buf.release(decrement); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java new file mode 100644 index 0000000000000..a76eb9fa4875b --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.Unpooled; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; + +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBuf; +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBufAllocator; + +public class NettyAllocatorTests extends ESTestCase { + + static void assertBufferTrashed(BytesReference bytesRef) throws IOException { + var iter = bytesRef.iterator(); + BytesRef br; + while ((br = iter.next()) != null) { + for (var i = br.offset; i < br.offset + br.length; i++) { + assertEquals("off=" + br.offset + " len=" + br.length + " i=" + i, 0, br.bytes[i]); + } + } + } + + public void testTrashArrayByteBuf() { + var arr = randomByteArrayOfLength(between(1024, 2048)); + var buf = Unpooled.wrappedBuffer(arr); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + var emptyArr = new byte[arr.length]; + assertArrayEquals(emptyArr, arr); + } + + public void testNioBufsTrashingByteBuf() { + var arrCnt = between(1, 16); + var byteArrs = new byte[arrCnt][]; + var byteBufs = new ByteBuffer[arrCnt]; + for (var i = 0; i < arrCnt; i++) { + byteArrs[i] = randomByteArrayOfLength(between(1024, 2048)); + byteBufs[i] = ByteBuffer.wrap(byteArrs[i]); + } + var buf = Unpooled.wrappedBuffer(byteBufs); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + for (int i = 0; i < arrCnt; i++) { + for (int j = 0; j < byteArrs[i].length; j++) { + assertEquals(0, byteArrs[i][j]); + } + } + } + + public void testNioBufOffsetTrashingByteBuf() { + var arr = randomByteArrayOfLength(1024); + var off = 1; + var len = arr.length - 2; + arr[0] = 1; + arr[arr.length - 1] = 1; + var buf = Unpooled.wrappedBuffer(arr, off, len); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + assertEquals(1, arr[0]); + assertEquals(1, arr[arr.length - 1]); + for (int i = 1; i < arr.length - 1; i++) { + assertEquals("at index " + i, 0, arr[i]); + } + } + + public void testTrashingByteBufAllocator() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var size = between(1024 * 1024, 10 * 1024 * 1024); + + // use 3 different heap allocation methods + for (var buf : List.of(alloc.heapBuffer(), alloc.heapBuffer(1024), alloc.heapBuffer(1024, size))) { + buf.writeBytes(randomByteArrayOfLength(size)); + var bytesRef = Netty4Utils.toBytesReference(buf); + buf.release(); + assertBufferTrashed(bytesRef); + } + } + + public void testTrashingCompositeByteBuf() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var compBuf = alloc.compositeHeapBuffer(); + for (var i = 0; i < between(1, 10); i++) { + var buf = alloc.heapBuffer().writeBytes(randomByteArrayOfLength(between(1024, 8192))); + compBuf.addComponent(true, buf); + } + var bytesRef = Netty4Utils.toBytesReference(compBuf); + compBuf.release(); + assertBufferTrashed(bytesRef); + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index ddcfc1ea7eed8..51e6512072e41 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -74,6 +74,29 @@ static ByteBuffer[] 
toByteBuffers(BytesReference reference) {
         }
     }
 
+    /**
+     * Allocates a new buffer and copies the bytes from the given BytesReference.
+     *
+     * @deprecated copying bytes is a prime source of performance regressions and unnecessary allocations.
+     * This method exists to serve the very few places that struggle to handle reference counted buffers.
+     */
+    @Deprecated(forRemoval = true)
+    static BytesReference copyBytes(BytesReference bytesReference) {
+        byte[] arr = new byte[bytesReference.length()];
+        int offset = 0;
+        final BytesRefIterator iterator = bytesReference.iterator();
+        try {
+            BytesRef slice;
+            while ((slice = iterator.next()) != null) {
+                System.arraycopy(slice.bytes, slice.offset, arr, offset, slice.length);
+                offset += slice.length;
+            }
+            return new BytesArray(arr);
+        } catch (IOException e) {
+            throw new AssertionError(e);
+        }
+    }
+
     /**
      * Returns BytesReference composed of the provided ByteBuffers.
      */
diff --git a/server/src/main/java/org/elasticsearch/http/HttpBody.java b/server/src/main/java/org/elasticsearch/http/HttpBody.java
index a10487502ed3c..6571125677fab 100644
--- a/server/src/main/java/org/elasticsearch/http/HttpBody.java
+++ b/server/src/main/java/org/elasticsearch/http/HttpBody.java
@@ -9,7 +9,6 @@
 
 package org.elasticsearch.http;
 
-import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.bytes.ReleasableBytesReference;
 import org.elasticsearch.core.Nullable;
@@ -21,11 +20,11 @@ public sealed interface HttpBody extends Releasable permits HttpBody.Full, HttpBody.Stream {
 
     static Full fromBytesReference(BytesReference bytesRef) {
-        return new ByteRefHttpBody(bytesRef);
+        return new ByteRefHttpBody(ReleasableBytesReference.wrap(bytesRef));
     }
 
     static Full empty() {
-        return new ByteRefHttpBody(BytesArray.EMPTY);
+        return new ByteRefHttpBody(ReleasableBytesReference.empty());
     }
 
     default boolean isFull() {
@@ -56,7 +55,7 @@ default Stream asStream() {
      * Full content represents a complete http body content that can be accessed immediately.
      */
     non-sealed interface Full extends HttpBody {
-        BytesReference bytes();
+        ReleasableBytesReference bytes();
 
         @Override
         default void close() {}
@@ -114,5 +113,5 @@ interface ChunkHandler extends Releasable {
         default void close() {}
     }
 
-    record ByteRefHttpBody(BytesReference bytes) implements Full {}
+    record ByteRefHttpBody(ReleasableBytesReference bytes) implements Full {}
 }
diff --git a/server/src/main/java/org/elasticsearch/http/HttpRequest.java b/server/src/main/java/org/elasticsearch/http/HttpRequest.java
index ca6e51f2cec08..b4b1bb84433c9 100644
--- a/server/src/main/java/org/elasticsearch/http/HttpRequest.java
+++ b/server/src/main/java/org/elasticsearch/http/HttpRequest.java
@@ -52,10 +52,4 @@ enum HttpVersion {
      */
     void release();
 
-    /**
-     * If this instances uses any pooled resources, creates a copy of this instance that does not use any pooled resources and releases
-     * any resources associated with this instance. If the instance does not use any shared resources, returns itself.
- * @return a safe unpooled http request - */ - HttpRequest releaseAndCopy(); } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 3d8360e6ee3fa..d6daf11c0539a 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -94,7 +94,7 @@ HttpTracer maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { private void logFullContent(RestRequest restRequest) { try (var stream = HttpBodyTracer.getBodyOutputStream(restRequest.getRequestId(), HttpBodyTracer.Type.REQUEST)) { - restRequest.content().writeTo(stream); + restRequest.releasableContent().writeTo(stream); } catch (Exception e2) { assert false : e2; // no real IO here } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index f1b59ed14cefb..4564a37dacf4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -122,6 +122,7 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl ); } + usageCount.increment(); if (request.isStreamedContent()) { assert action instanceof RequestBodyChunkConsumer; var chunkConsumer = (RequestBodyChunkConsumer) action; @@ -137,11 +138,11 @@ public void close() { chunkConsumer.streamClose(); } }); + action.accept(channel); + } else { + action.accept(channel); + request.getHttpRequest().release(); } - - usageCount.increment(); - // execute the action - action.accept(channel); } } diff --git a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java index cb5155cb0de0b..21a44ac9af5c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java @@ -43,11 +43,6 @@ public boolean canTripCircuitBreaker() { return delegate.canTripCircuitBreaker(); } - @Override - public boolean allowsUnsafeBuffers() { - return delegate.allowsUnsafeBuffers(); - } - @Override public boolean supportsBulkContent() { return delegate.supportsBulkContent(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 7446ec5bb6717..49fe794bbe615 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -432,10 +432,6 @@ private void dispatchRequest( } // iff we could reserve bytes for the request we need to send the response also over this channel responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, methodHandlers); - // TODO: Count requests double in the circuit breaker if they need copying? 
- if (handler.allowsUnsafeBuffers() == false) { - request.ensureSafeBuffers(); - } if (handler.allowSystemIndexAccessByDefault() == false) { // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product and diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index cf66e402d3691..572e92e369a63 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -69,18 +69,6 @@ default Scope getServerlessScope() { return serverlessScope == null ? null : serverlessScope.value(); } - /** - * Indicates if the RestHandler supports working with pooled buffers. If the request handler will not escape the return - * {@link RestRequest#content()} or any buffers extracted from it then there is no need to make a copies of any pooled buffers in the - * {@link RestRequest} instance before passing a request to this handler. If this instance does not support pooled/unsafe buffers - * {@link RestRequest#ensureSafeBuffers()} should be called on any request before passing it to {@link #handleRequest}. - * - * @return true iff the handler supports requests that make use of pooled buffers - */ - default boolean allowsUnsafeBuffers() { - return false; - } - /** * The list of {@link Route}s that this RestHandler is responsible for handling. */ diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 17eda305b5ccf..17d85a8eabb1c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -16,17 +16,21 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpBody; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; @@ -51,6 +55,8 @@ public class RestRequest implements ToXContent.Params, Traceable { + private static final Logger logger = LogManager.getLogger(RestRequest.class); + /** * Internal marker request parameter to indicate that a request was made in serverless mode. Use this parameter, together with * {@link #OPERATOR_REQUEST} if you need to toggle behavior for serverless, for example to enforce partial API restrictions @@ -187,15 +193,6 @@ protected RestRequest(RestRequest other) { } } - /** - * Invoke {@link HttpRequest#releaseAndCopy()} on the http request in this instance and replace a pooled http request - * with an unpooled copy. 
This is supposed to be used before passing requests to {@link RestHandler} instances that can not safely
-     * handle http requests that use pooled buffers as determined by {@link RestHandler#allowsUnsafeBuffers()}.
-     */
-    void ensureSafeBuffers() {
-        httpRequest = httpRequest.releaseAndCopy();
-    }
-
     /**
      * Creates a new REST request.
      *
@@ -306,9 +303,31 @@ public boolean isFullContent() {
         return httpRequest.body().isFull();
     }
 
+    /**
+     * Returns a copy of the HTTP content. The copy is GC-managed and does not require reference counting.
+     * Please use {@link #releasableContent()} to avoid copying the content.
+     */
+    @SuppressForbidden(reason = "temporarily support content copy while migrating RestHandlers to ref counted pooled buffers")
     public BytesReference content() {
+        return BytesReference.copyBytes(releasableContent());
+    }
+
+    /**
+     * Returns a direct reference to the network buffer containing the request body. The HTTP layers will release their references to this
+     * buffer as soon as they have finished the synchronous steps of processing the request on the network thread, which will by default
+     * release the buffer back to the pool where it may be re-used for another request. If you need to keep the buffer alive past the end of
+     * these synchronous steps, acquire your own reference to this buffer and release it once it's no longer needed.
+     */
+    public ReleasableBytesReference releasableContent() {
         this.contentConsumed = true;
-        return httpRequest.body().asFull().bytes();
+        var bytes = httpRequest.body().asFull().bytes();
+        if (bytes.hasReferences() == false) {
+            var e = new IllegalStateException("http releasable content accessed after release");
+            logger.error(e.getMessage(), e);
+            assert false : e;
+            throw e;
+        }
+        return bytes;
     }
 
     public boolean isStreamedContent() {
@@ -319,18 +338,32 @@ public HttpBody.Stream contentStream() {
         return httpRequest.body().asStream();
     }
 
-    /**
-     * @return content of the request body or throw an exception if the body or content type is missing
-     */
-    public final BytesReference requiredContent() {
+    private void ensureContent() {
         if (hasContent() == false) {
             throw new ElasticsearchParseException("request body is required");
         } else if (xContentType.get() == null) {
             throwValidationException("unknown content type");
        }
+    }
+
+    /**
+     * @return a copy of the request body, or throws an exception if the body or content type is missing.
+     * See {@link #content()}. Please use {@link #requiredReleasableContent()} to avoid copying the content.
+     */
+    public final BytesReference requiredContent() {
+        ensureContent();
         return content();
     }
 
+    /**
+     * Returns a reference to the network buffer of the HTTP content, or throws an exception if the body or content type is missing.
+     * See {@link #releasableContent()}. This is the recommended way to handle HTTP content without copying it.
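+     * <p>
+     * A sketch of the intended usage pattern, mirroring what the bulk and index handlers in this change do; the
+     * {@code action}, {@code actionRequest} and {@code listener} names are illustrative placeholders, not part of
+     * this API:
+     * <pre>{@code
+     * ReleasableBytesReference content = restRequest.requiredReleasableContent();
+     * // take an extra reference so the network buffer survives past the synchronous request-handling steps
+     * content.mustIncRef();
+     * // release that reference once the asynchronous action completes
+     * client.execute(action, actionRequest, ActionListener.releaseAfter(listener, content));
+     * }</pre>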
+ */ + public ReleasableBytesReference requiredReleasableContent() { + ensureContent(); + return releasableContent(); + } + private static void throwValidationException(String msg) { ValidationException unknownContentType = new ValidationException(); unknownContentType.addValidationError(msg); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java index e4105363e1bce..57b4d2990c8e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Tuple; @@ -44,10 +45,10 @@ public boolean hasContent() { } @Override - public BytesReference content() { + public ReleasableBytesReference releasableContent() { if (filteredBytes == null) { Tuple> result = XContentHelper.convertToMap( - restRequest.requiredContent(), + restRequest.requiredReleasableContent(), true, restRequest.getXContentType() ); @@ -63,7 +64,7 @@ public BytesReference content() { throw new ElasticsearchException("failed to parse request", e); } } - return filteredBytes; + return ReleasableBytesReference.wrap(filteredBytes); } }; } else { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index de3fd390ec86d..9428ef5390b2f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestParser; @@ -102,9 +103,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); + ReleasableBytesReference content = request.requiredReleasableContent(); + try { bulkRequest.add( - request.requiredContent(), + content, defaultIndex, defaultRouting, defaultFetchSourceContext, @@ -119,8 +122,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } catch (Exception e) { return channel -> new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); } - - return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); + return channel -> { + content.mustIncRef(); + client.bulk(bulkRequest, ActionListener.releaseAfter(new RestRefCountedChunkedToXContentListener<>(channel), content)); + }; } else { String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); @@ -270,11 +275,6 @@ public boolean supportsBulkContent() 
{ return true; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public Set supportedCapabilities() { return capabilities; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index c2437dcb96fa6..d81ac03492d59 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -9,12 +9,14 @@ package org.elasticsearch.rest.action.document; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -104,11 +106,12 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + ReleasableBytesReference source = request.requiredReleasableContent(); IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.setPipeline(request.param("pipeline")); - indexRequest.source(request.requiredContent(), request.getXContentType()); + indexRequest.source(source, request.getXContentType()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.setRefreshPolicy(request.param("refresh")); indexRequest.version(RestActions.parseVersion(request)); @@ -126,10 +129,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indexRequest.opType(sOpType); } - return channel -> client.index( - indexRequest, - new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) - ); + return channel -> { + source.mustIncRef(); + client.index( + indexRequest, + ActionListener.releaseAfter( + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())), + source + ) + ); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 80a85d3b9b748..ff062084a3cbb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -423,8 +423,4 @@ protected Set responseParams() { return RESPONSE_PARAMS; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } } diff --git a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index ad298e7aa8307..3fd8535cd5c27 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -107,4 +107,10 @@ public void testGetDoubleLE() { Exception e = 
expectThrows(ArrayIndexOutOfBoundsException.class, () -> ref.getDoubleLE(9)); assertThat(e.getMessage(), equalTo("Index 9 out of bounds for length 9")); } + + public void testCopyBytes() { + var data = randomByteArrayOfLength(between(1024, 1024 * 1024 * 50)); + var copy = BytesReference.copyBytes(new BytesArray(data)); + assertArrayEquals(data, BytesReference.toBytes(copy)); + } } diff --git a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java index 8cd61453a3391..27dc0be673abb 100644 --- a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java +++ b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java @@ -85,11 +85,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index b7d38f6f299c7..2fdb3daa26da4 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -906,11 +906,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 9ddcf39d24d98..0c466b9162eb8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -138,11 +138,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return inboundException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java index aa200f7ae9acb..214f9150dfcc5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java @@ -32,7 +32,7 @@ protected final BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest r // We need to consume parameters and content from the REST request in order to bypass unrecognized param errors // and return a license error. 
request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); return channel -> channel.sendResponse( new RestResponse(channel, LicenseUtils.newComplianceException(this.licenseState, this.product)) ); diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java index a9992e168bc66..2ea56b147bf9c 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return restChannel -> { - final String content = request.content().utf8ToString(); + final String content = request.releasableContent().utf8ToString(); client.execute( PutPipelineAction.INSTANCE, new PutPipelineRequest(id, content, request.getXContentType()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java index 13e3e40887d89..429b632cdac18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java @@ -27,10 +27,11 @@ public class AuditUtil { public static String restRequestContent(RestRequest request) { if (request.hasContent()) { + var content = request.releasableContent(); try { - return XContentHelper.convertToJson(request.content(), false, false, request.getXContentType()); + return XContentHelper.convertToJson(content, false, false, request.getXContentType()); } catch (IOException ioe) { - return "Invalid Format: " + request.content().utf8ToString(); + return "Invalid Format: " + content.utf8ToString(); } } return ""; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index f0405e42f1f22..df21f5d4eeb0b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -75,7 +75,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie return innerPrepareRequest(request, client); } else { request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); // mark content consumed return channel -> channel.sendResponse(new RestResponse(channel, failedFeature)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 5adc1e351931d..3be40c280874d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -2614,7 +2614,7 @@ public void testAuthenticationSuccessRest() throws 
Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { @@ -2643,8 +2643,8 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { - checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.getHttpRequest().body().asFull().bytes().utf8ToString()); + if (includeRequestBody && request.hasContent()) { + checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { checkedFields.put(LoggingAuditTrail.URL_QUERY_FIELD_NAME, "foo=bar&evac=true"); @@ -2672,7 +2672,7 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content().utf8ToString())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { From b378a1bb54650247c867329f6bf3265918a89fa0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 20 Nov 2024 22:09:17 +0000 Subject: [PATCH 148/386] Bump 8.x to 8.18.0 --- .backportrc.json | 4 +- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 + .buildkite/pipelines/periodic.yml | 23 +- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 1 + .../reference/migration/migrate_8_18.asciidoc | 20 + docs/reference/migration/migrate_9_0.asciidoc | 548 ++++++------ docs/reference/release-notes/8.18.0.asciidoc | 8 + docs/reference/release-notes/9.0.0.asciidoc | 812 ++++++------------ .../release-notes/highlights.asciidoc | 168 +--- 11 files changed, 619 insertions(+), 984 deletions(-) create mode 100644 docs/reference/migration/migrate_8_18.asciidoc create mode 100644 docs/reference/release-notes/8.18.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index 03f3f892f9227..20287f0bfc0e6 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,10 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { "^v9.0.0$" : "main", - "^v8.17.0$" : "8.x", + "^v8.18.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } diff --git a/.buildkite/pipelines/intake.yml 
b/.buildkite/pipelines/intake.yml index 19e99852869e6..8935872fdec83 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 7dd8269f4ffe6..2dbb7f5193af6 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -320,6 +320,22 @@ steps: env: BWC_VERSION: 8.17.0 + - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.18.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 79371d6ddccf5..047e4a3f4f8f6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -344,6 +344,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.18.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.18.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 @@ -429,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -471,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 85522e47a523f..ac07e14c2a176 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -17,4 +17,5 @@ BWC_VERSION: - "8.15.4" - "8.16.1" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 9ea3072021bb3..351c605e6e092 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: - "8.16.1" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/docs/reference/migration/migrate_8_18.asciidoc b/docs/reference/migration/migrate_8_18.asciidoc new file mode 100644 index 0000000000000..c989ff9f85b6d --- /dev/null +++ b/docs/reference/migration/migrate_8_18.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.18]] +== Migrating to 8.18 +++++ +8.18 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.18. + +See also <> and <>. 
+ +coming::[8.18.0] + + +[discrete] +[[breaking-changes-8.18]] +=== Breaking changes + +There are no breaking changes in {es} 8.18. + diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 6569647fd993e..5048220966bba 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[migrating-9.0]] == Migrating to 9.0 ++++ @@ -23,204 +20,229 @@ The following changes in {es} 9.0 might affect your applications and prevent them from operating normally. Before upgrading to 9.0, review these changes and take the described steps to mitigate the impact. -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[set_lenient_to_true_by_default_when_using_updateable_synonyms]] -// .Set lenient to true by default when using updateable synonyms -// [%collapsible] -// ==== -// *Details* + -// When a `synonym` or `synonym_graph` token filter is configured with `updateable: true`, the default `lenient` -// value will now be `true`. -// -// *Impact* + -// `synonym` or `synonym_graph` token filters configured with `updateable: true` will ignore invalid synonyms by -// default. This prevents shard initialization errors on invalid synonyms. -// ==== -// -// [discrete] -// [[breaking_90_mapping_changes]] -// ==== Mapping changes -// -// [[jdk_locale_database_change]] -// .JDK locale database change -// [%collapsible] -// ==== -// *Details* + -// {es} 8.16 changes the version of the JDK that is included from version 22 to version 23. This changes the locale database that is used by Elasticsearch from the COMPAT database to the CLDR database. This change can cause significant differences to the textual date formats accepted by Elasticsearch, and to calculated week-dates. -// -// If you run {es} 8.16 on JDK version 22 or below, it will use the COMPAT locale database to match the behavior of 8.15. However, starting with {es} 9.0, {es} will use the CLDR database regardless of JDK version it is run on. -// -// *Impact* + -// This affects you if you use custom date formats using textual or week-date field specifiers. If you use date fields or calculated week-dates that change between the COMPAT and CLDR databases, then this change will cause Elasticsearch to reject previously valid date fields as invalid data. You might need to modify your ingest or output integration code to account for the differences between these two JDK versions. -// -// Starting in version 8.15.2, Elasticsearch will log deprecation warnings if you are using date format specifiers that might change on upgrading to JDK 23. These warnings are visible in Kibana. -// -// For detailed guidance, refer to <> and the https://ela.st/jdk-23-locales[Elastic blog]. -// ==== -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[snowball_stemmers_have_been_upgraded]] -// .Snowball stemmers have been upgraded -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users using Snowball stemmers that are experiencing changes in search behaviour on existing data are advised to reindex. 
-// -// *Impact* + -// The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously index data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed. -// ==== -// -// [[german2_snowball_stemmer_an_alias_for_german_stemmer]] -// .The "german2" snowball stemmer is now an alias for the "german" stemmer -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may results in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü" etc...) -// -// *Impact* + -// Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible. -// ==== -// -// [[persian_analyzer_has_stemmer_by_default]] -// .The 'persian' analyzer has stemmer by default -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. -// -// *Impact* + -// Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. -// ==== -// -// [[korean_dictionary_for_nori_has_been_updated]] -// .The Korean dictionary for Nori has been updated -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. -// ==== -// -// [discrete] -// [[breaking_90_cluster_and_node_setting_changes]] -// ==== Cluster and node setting changes -// -// [[remove_unsupported_legacy_value_for_discovery_type]] -// .Remove unsupported legacy value for `discovery.type` -// [%collapsible] -// ==== -// *Details* + -// Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. -// -// *Impact* + -// Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. -// ==== -// -// [discrete] -// [[breaking_90_es_ql_changes]] -// ==== ES|QL changes -// -// [[esql_entirely_remove_meta_functions]] -// .ESQL: Entirely remove META FUNCTIONS -// [%collapsible] -// ==== -// *Details* + -// Removes an undocumented syntax from ESQL: META FUNCTION. This was never -// reliable or really useful. 
Consult the documentation instead. -// -// *Impact* + -// Removes an undocumented syntax from ESQL: META FUNCTION -// ==== -// -// [discrete] -// [[breaking_90_rest_api_changes]] -// ==== REST API changes -// -// [[remove_cluster_state_from_cluster_reroute_response]] -// .Remove cluster state from `/_cluster/reroute` response -// [%collapsible] -// ==== -// *Details* + -// The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version. -// -// *Impact* + -// Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API. -// ==== -// -// [[remove_deprecated_local_attribute_from_alias_apis]] -// .Remove deprecated local attribute from alias APIs -// [%collapsible] -// ==== -// *Details* + -// The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. -// -// *Impact* + -// Cease usage of the `?local` query parameter when calling the listed APIs. -// ==== -// -// [[reworking_rrf_retriever_to_be_evaluated_during_rewrite_phase]] -// .Reworking RRF retriever to be evaluated during rewrite phase -// [%collapsible] -// ==== -// *Details* + -// In this release (8.16), we have introduced major changes to the retrievers framework -// and how they can be evaluated, focusing mainly on compound retrievers -// like `rrf` and `text_similarity_reranker`, which allowed us to support full -// composability (i.e. any retriever can be nested under any compound retriever), -// as well as supporting additional search features like collapsing, explaining, -// aggregations, and highlighting. -// -// To ensure consistency, and given that this rework is not available until 8.16, -// `rrf` and `text_similarity_reranker` retriever queries would now -// throw an exception in a mixed cluster scenario, where there are nodes -// both in current or later (i.e. >= 8.16) and previous ( <= 8.15) versions. -// -// As part of the rework, we have also removed the `_rank` property from -// the responses of an `rrf` retriever. -// -// *Impact* + -// - Users will not be able to use the `rrf` and `text_similarity_reranker` retrievers in a mixed cluster scenario -// with previous releases (i.e. prior to 8.16), and the request will throw an `IllegalArgumentException`. -// - `_rank` has now been removed from the output of the `rrf` retrievers so trying to directly parse the field -// will throw an exception -// ==== -// -// [[update_data_stream_lifecycle_telemetry_to_track_global_retention]] -// .Update data stream lifecycle telemetry to track global retention -// [%collapsible] -// ==== -// *Details* + -// In this release we introduced global retention settings that fulfil the following criteria: -// -// - a data stream managed by the data stream lifecycle, -// - a data stream that is not an internal data stream. -// -// As a result, we defined different types of retention: -// -// - **data retention**: the retention configured on data stream level by the data stream user or owner -// - **default global retention:** the retention configured by an admin on a cluster level and applied to any -// data stream that doesn't have data retention and fulfils the criteria. 
-// - **max global retention:** the retention configured by an admin to guard against having long retention periods.
-// Any data stream that fulfills the criteria will adhere to the data retention unless it exceeds the max retention,
-// in which case the max global retention applies.
-// - **effective retention:** the retention that applies on the data stream that fulfill the criteria at a given moment
-// in time. It takes into consideration all the retention above and resolves it to the retention that will take effect.
-//
-// Considering the above changes, having a field named `retention` in the usage API was confusing. For this reason, we
-// renamed it to `data_retention` and added telemetry about the other configurations too.
-//
-// *Impact* +
-// Users that use the field `data_lifecycle.retention` should use the `data_lifecycle.data_retention`
-// ====
+
+
+There are no notable breaking changes in {es} 9.0.
+But there are some less critical breaking changes.
+
+[discrete]
+[[breaking_90_analysis_changes]]
+==== Analysis changes
+
+[[snowball_stemmers_have_been_upgraded]]
+.Snowball stemmers have been upgraded
+[%collapsible]
+====
+*Details* +
+Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users of Snowball stemmers that experience changes in search behaviour on existing data are advised to reindex.
+
+*Impact* +
+The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously indexed data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed.
+====
+
+[[german2_snowball_stemmer_an_alias_for_german_stemmer]]
+.The "german2" snowball stemmer is now an alias for the "german" stemmer
+[%collapsible]
+====
+*Details* +
+Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may result in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü", etc.).
+
+*Impact* +
+Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible.
+====
+
+[[persian_analyzer_has_stemmer_by_default]]
+.The 'persian' analyzer has a stemmer by default
+[%collapsible]
+====
+*Details* +
+Lucene 10 has added a final stemming step to its PersianAnalyzer, which Elasticsearch exposes as the 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data.
+
+*Impact* +
+Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check whether this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer.
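+
+For illustration, a legacy-equivalent analyzer might be defined roughly as follows. This is a sketch assuming the
+filter composition described in the linked reference documentation; the index name, analyzer name and exact filter
+list are illustrative rather than authoritative:
+
+[source,console]
+----
+PUT /persian_example
+{
+  "settings": {
+    "analysis": {
+      "char_filter": {
+        "zero_width_spaces": {
+          "type": "mapping",
+          "mappings": [ "\\u200C=>\\u0020" ]
+        }
+      },
+      "filter": {
+        "persian_stop": {
+          "type": "stop",
+          "stopwords": "_persian_"
+        }
+      },
+      "analyzer": {
+        "rebuilt_persian": {
+          "tokenizer": "standard",
+          "char_filter": [ "zero_width_spaces" ],
+          "filter": [ "lowercase", "decimal_digit", "arabic_normalization", "persian_normalization", "persian_stop" ]
+        }
+      }
+    }
+  }
+}
+----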
+==== + +[[korean_dictionary_for_nori_has_been_updated]] +.The Korean dictionary for Nori has been updated +[%collapsible] +==== +*Details* + +Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. + +*Impact* + +The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. +==== + +[discrete] +[[breaking_90_cluster_and_node_setting_changes]] +==== Cluster and node setting changes + +[[minimum_shard_balancer_threshold_1_0]] +.Minimum shard balancer threshold is now 1.0 +[%collapsible] +==== +*Details* + +Earlier versions of {es} accepted any non-negative value for `cluster.routing.allocation.balance.threshold`, but values smaller than `1.0` do not make sense and have been ignored since version 8.6.1. From 9.0.0 these nonsensical values are now forbidden. + +*Impact* + +Do not set `cluster.routing.allocation.balance.threshold` to a value less than `1.0`. +==== + +[[remove_cluster_routing_allocation_disk_watermark_enable_for_single_data_node_setting]] +.Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting +[%collapsible] +==== +*Details* + +Prior to 7.8, whenever a cluster had only a single data node, the watermarks would not be respected. In order to change this in 7.8+ in a backwards compatible way, we introduced the `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` node setting. The setting was deprecated in 7.14 and was made to accept only true in 8.0 + +*Impact* + +No known end user impact +==== + +[[remove_deprecated_xpack_searchable_snapshot_allocate_on_rolling_restart_setting]] +.Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting +[%collapsible] +==== +*Details* + +The `xpack.searchable.snapshot.allocate_on_rolling_restart` setting was created as an escape-hatch just in case relying on the `cluster.routing.allocation.enable=primaries` setting for allocating searchable snapshots during rolling restarts had some unintended side-effects. It has been deprecated since 8.2.0. + +*Impact* + +Remove `xpack.searchable.snapshot.allocate_on_rolling_restart` from your settings if present. +==== + +[[remove_unsupported_legacy_value_for_discovery_type]] +.Remove unsupported legacy value for `discovery.type` +[%collapsible] +==== +*Details* + +Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. + +*Impact* + +Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. +==== + +[discrete] +[[breaking_90_ingest_changes]] +==== Ingest changes + +[[remove_ecs_option_on_user_agent_processor]] +.Remove `ecs` option on `user_agent` processor +[%collapsible] +==== +*Details* + +The `user_agent` ingest processor no longer accepts the `ecs` option. (It was previously deprecated and ignored.) + +*Impact* + +Users should stop using the `ecs` option when creating instances of the `user_agent` ingest processor. The option will be removed from existing processors stored in the cluster state on upgrade. 
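+
+For example, a minimal `user_agent` processor definition without the removed option might look like this sketch
+(the pipeline id and field name are illustrative):
+
+[source,console]
+----
+PUT _ingest/pipeline/user_agent_example
+{
+  "processors": [
+    {
+      "user_agent": {
+        "field": "agent"
+      }
+    }
+  ]
+}
+----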
+====
+
+[[remove_ignored_fallback_option_on_geoip_processor]]
+.Remove ignored fallback option on GeoIP processor
+[%collapsible]
+====
+*Details* +
+The option fallback_to_default_databases on the geoip ingest processor has been removed. (It had been deprecated and ignored since 8.0.0.)
+
+*Impact* +
+Customers should remove the no-op fallback_to_default_databases option from any geoip ingest processors.
+====
+
+[discrete]
+[[breaking_90_mapping_changes]]
+==== Mapping changes
+
+[[remove_support_for_type_fields_copy_to_boost_in_metadata_field_definition]]
+.Remove support for type, fields, copy_to and boost in metadata field definition
+[%collapsible]
+====
+*Details* +
+The type, fields, copy_to and boost parameters are no longer supported in metadata field definitions.
+
+*Impact* +
+Users providing type, fields, copy_to or boost as part of a metadata field definition should remove them from their mappings.
+====
+
+[discrete]
+[[breaking_90_rest_api_changes]]
+==== REST API changes
+
+[[apply_more_strict_parsing_of_actions_in_bulk_api]]
+.Apply more strict parsing of actions in bulk API
+[%collapsible]
+====
+*Details* +
+Previously, the following classes of malformed input were deprecated but not rejected in the action lines of a bulk request: missing closing brace; additional keys after the action (which were ignored); additional data after the closing brace (which was ignored). They will now be considered errors and rejected.
+
+*Impact* +
+Users must provide well-formed input when using the bulk API. (They can request REST API compatibility with v8 to get the previous behaviour back as an interim measure.)
+====
+
+[[error_json_structure_has_changed_when_detailed_errors_are_disabled]]
+.Error JSON structure has changed when detailed errors are disabled
+[%collapsible]
+====
+*Details* +
+This change modifies the JSON format of error messages returned to REST clients
+when detailed messages are turned off.
+Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set,
+just consisted of a single `"error"` text field with some basic information.
+Setting `http.detailed_errors.enabled: true` (the default) changed this field
+to an object with more detailed information.
+With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always
+be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed
+errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+
+*Impact* +
+If you have set `http.detailed_errors.enabled: false` (the default is `true`),
+the structure of JSON when any exceptions occur now matches the structure when
+detailed errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+====
+
+[[remove_cluster_state_from_cluster_reroute_response]]
+.Remove cluster state from `/_cluster/reroute` response
+[%collapsible]
+====
+*Details* +
+The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version.
+
+*Impact* +
+Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API.
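+
+For example, issue reroute calls without the query parameter (a sketch; the `retry_failed` usage shown is illustrative):
+
+[source,console]
+----
+POST /_cluster/reroute?retry_failed=true
+----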
+==== + +[[remove_deprecated_local_attribute_from_alias_apis]] +.Remove deprecated local attribute from alias APIs +[%collapsible] +==== +*Details* + +The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. + +*Impact* + +Cease usage of the `?local` query parameter when calling the listed APIs. +==== + +[[remove_legacy_params_from_range_query]] +.Remove legacy params from range query +[%collapsible] +==== +*Details* + +The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + +*Impact* + +Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. +==== + +[[remove_support_for_deprecated_force_source_highlighting_parameter]] +.Remove support for deprecated `force_source` highlighting parameter +[%collapsible] +==== +*Details* + +The deprecated highlighting `force_source` parameter is no longer supported. + +*Impact* + +Users should remove usages of the `force_source` parameter from their search requests. +==== [discrete] @@ -235,85 +257,45 @@ after upgrading to 9.0. To find out if you are using any deprecated functionality, enable <>. -// -// [discrete] -// [[deprecations_90_analysis]] -// ==== Analysis deprecations -// -// [[deprecate_dutch_kp_lovins_stemmer_as_they_are_removed_in_lucene_10]] -// .Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 -// [%collapsible] -// ==== -// *Details* + -// kp, dutch_kp, dutchKp and lovins stemmers are deprecated and will be removed. -// -// *Impact* + -// These stemmers will be removed and will be no longer supported. -// ==== -// -// [[deprecate_edge_ngram_side_parameter]] -// .deprecate `edge_ngram` side parameter -// [%collapsible] -// ==== -// *Details* + -// edge_ngram will no longer accept the side parameter. -// -// *Impact* + -// Users will need to update any usage of edge_ngram token filter that utilizes `side`. If the `back` value was used, they can achieve the same behavior by using the `reverse` token filter. -// ==== -// -// [discrete] -// [[deprecations_90_crud]] -// ==== CRUD deprecations -// -// [[deprecate_dot_prefixed_indices_composable_template_index_patterns]] -// .Deprecate dot-prefixed indices and composable template index patterns -// [%collapsible] -// ==== -// *Details* + -// Indices beginning with a dot '.' are reserved for system and internal indices, and should not be used by and end-user. Additionally, composable index templates that contain patterns for dot-prefixed indices should also be avoided, as these patterns are meant for internal use only. In a future Elasticsearch version, creation of these dot-prefixed indices will no longer be allowed. -// -// *Impact* + -// Requests performing an action that would create an index beginning with a dot (indexing a document, manual creation, reindex), or creating an index template with index patterns beginning with a dot, will contain a deprecation header warning about dot-prefixed indices in the response. 
-// ====
-//
-// [discrete]
-// [[deprecations_90_rest_api]]
-// ==== REST API deprecations
-//
-// [[adding_deprecation_warnings_for_rrf_using_rank_sub_searches]]
-// .Adding deprecation warnings for rrf using rank and `sub_searches`
-// [%collapsible]
-// ====
-// *Details* +
-// Search API parameter `sub_searches` will no longer be a supported and will be removed in future releases. Similarly, `rrf` can only be used through the specified `retriever` and no longer though the `rank` parameter
-//
-// *Impact* +
-// Requests specifying rrf through `rank` and/or `sub_searches` elements will be disallowed in a future version. Users should instead utilize the new `retriever` parameter.
-// ====
-//
-// [[deprecate_legacy_params_from_range_query]]
-// .Deprecate legacy params from range query
-// [%collapsible]
-// ====
-// *Details* +
-// Range query will not longer accept `to`, `from`, `include_lower`, and `include_upper` parameters.
-//
-// *Impact* +
-// Instead use `gt`, `gte`, `lt` and `lte` parameters.
-// ====
-//
-// [[inference_api_deprecate_elser_service]]
-// .[Inference API] Deprecate elser service
-// [%collapsible]
-// ====
-// *Details* +
-// The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead.
-//
-// *Impact* +
-// In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API.
-// ====
-
-// BELOW WAS MANUALLY ADDED TO FIX THE BUILD
-include::migrate_9_0/transient-settings-migration-guide.asciidoc[]
-//include::migrate_9_0/rest-api-changes.asciidoc[] //see ES-9932
+
+[discrete]
+[[deprecations_90_mapping]]
+==== Mapping deprecations
+
+[[deprecate_source_mode_in_mappings]]
+.Deprecate `_source.mode` in mappings
+[%collapsible]
+====
+*Details* +
+Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use the `index.mapping.source.mode` index setting instead.
+
+*Impact* +
+Use the `index.mapping.source.mode` index setting instead.
+====
+
+[discrete]
+[[deprecations_90_rest_api]]
+==== REST API deprecations
+
+[[document_type_deprecated_on_simulate_pipeline_api]]
+.Document `_type` deprecated on simulate pipeline API
+[%collapsible]
+====
+*Details* +
+Passing a document with a `_type` property is deprecated in the `/_ingest/pipeline/{id}/_simulate` and `/_ingest/pipeline/_simulate` APIs.
+
+*Impact* +
+Users should already have stopped using mapping types, which were deprecated in {es} 7. This deprecation warning will fire if they specify mapping types on documents passed to the simulate pipeline API.
+====
+
+[[inference_api_deprecate_elser_service]]
+.[Inference API] Deprecate elser service
+[%collapsible]
+====
+*Details* +
+The `elser` service of the inference API will be removed in an upcoming release. Please use the `elasticsearch` service instead.
+
+*Impact* +
+In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API.
+====
+
diff --git a/docs/reference/release-notes/8.18.0.asciidoc b/docs/reference/release-notes/8.18.0.asciidoc
new file mode 100644
index 0000000000000..332edfbc23eb7
--- /dev/null
+++ b/docs/reference/release-notes/8.18.0.asciidoc
@@ -0,0 +1,8 @@
+[[release-notes-8.18.0]]
+== {es} version 8.18.0
+
+coming[8.18.0]
+
+Also see <>.
+ + diff --git a/docs/reference/release-notes/9.0.0.asciidoc b/docs/reference/release-notes/9.0.0.asciidoc index af26fd57385e3..93e5a30cb82f7 100644 --- a/docs/reference/release-notes/9.0.0.asciidoc +++ b/docs/reference/release-notes/9.0.0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-notes-9.0.0]] == {es} version 9.0.0 @@ -12,546 +9,289 @@ Also see <>. [float] === Breaking changes -// Allocation:: -// * Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) -// -// Analysis:: -// * Set lenient to true by default when using updateable synonyms {es-pull}110901[#110901] -// * Snowball stemmers have been upgraded {es-pull}114146[#114146] -// * The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] -// * The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) -// * The Korean dictionary for Nori has been updated {es-pull}114124[#114124] -// -// Cluster Coordination:: -// * Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] -// -// Data streams:: -// * Update data stream lifecycle telemetry to track global retention {es-pull}112451[#112451] -// -// ES|QL:: -// * ESQL: Entirely remove META FUNCTIONS {es-pull}113967[#113967] -// -// Indices APIs:: -// * Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] -// -// Mapping:: -// * JDK locale database change {es-pull}113975[#113975] -// -// Search:: -// * Adding breaking change entry for retrievers {es-pull}115399[#115399] +Allocation:: +* Increase minimum threshold in shard balancer {es-pull}115831[#115831] +* Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting {es-pull}114207[#114207] +* Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) + +Analysis:: +* Snowball stemmers have been upgraded {es-pull}114146[#114146] +* The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] +* The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) +* The Korean dictionary for Nori has been updated {es-pull}114124[#114124] + +Cluster Coordination:: +* Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] + +Highlighting:: +* Remove support for deprecated `force_source` highlighting parameter {es-pull}116943[#116943] + +Indices APIs:: +* Apply more strict parsing of actions in bulk API {es-pull}115923[#115923] +* Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] + +Infra/REST API:: +* Output a consistent format when generating error json {es-pull}90529[#90529] (issue: {es-issue}89387[#89387]) + +Ingest Node:: +* Remove `ecs` option on `user_agent` processor {es-pull}116077[#116077] +* Remove ignored fallback option on GeoIP processor {es-pull}116112[#116112] + +Mapping:: +* Remove support for type, fields, `copy_to` and boost in metadata field definition {es-pull}116944[#116944] + +Search:: +* Remove legacy params from range query {es-pull}116970[#116970] + +Snapshot/Restore:: +* Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting {es-pull}114202[#114202] [[bug-9.0.0]] [float] === Bug fixes -// -// Aggregations:: -// * 
Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` {es-pull}115181[#115181] -// * Force using the last centroid during merging {es-pull}111644[#111644] (issue: {es-issue}111065[#111065]) -// -// Authentication:: -// * Check for disabling own user in Put User API {es-pull}112262[#112262] (issue: {es-issue}90205[#90205]) -// * Expose cluster-state role mappings in APIs {es-pull}114951[#114951] -// -// Authorization:: -// * Fix DLS & FLS sometimes being enforced when it is disabled {es-pull}111915[#111915] (issue: {es-issue}94709[#94709]) -// * Fix DLS using runtime fields and synthetic source {es-pull}112341[#112341] -// -// CRUD:: -// * Don't fail retention lease sync actions due to capacity constraints {es-pull}109414[#109414] (issue: {es-issue}105926[#105926]) -// * Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] -// * Standardize error code when bulk body is invalid {es-pull}114869[#114869] -// -// Cluster Coordination:: -// * Ensure clean thread context in `MasterService` {es-pull}114512[#114512] -// -// Data streams:: -// * Adding support for data streams with a match-all template {es-pull}111311[#111311] (issue: {es-issue}111204[#111204]) -// * Exclude internal data streams from global retention {es-pull}112100[#112100] -// * Fix verbose get data stream API not requiring extra privileges {es-pull}112973[#112973] -// * OTel mappings: avoid metrics to be rejected when attributes are malformed {es-pull}114856[#114856] -// * [otel-data] Add more kubernetes aliases {es-pull}115429[#115429] -// * logs-apm.error-*: define log.level field as keyword {es-pull}112440[#112440] -// -// Distributed:: -// * Handle `InternalSendException` inline for non-forking handlers {es-pull}114375[#114375] -// -// EQL:: -// * Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) -// * Fix validation of TEXT fields with case insensitive comparison {es-pull}111238[#111238] (issue: {es-issue}111235[#111235]) -// -// ES|QL:: -// * ESQL: Add Values aggregation tests, fix `ConstantBytesRefBlock` memory handling {es-pull}111367[#111367] -// * ESQL: Align year diffing to the rest of the units in DATE_DIFF: chronological {es-pull}113103[#113103] (issue: {es-issue}112482[#112482]) -// * ESQL: Disable pushdown of WHERE past STATS {es-pull}115308[#115308] (issue: {es-issue}115281[#115281]) -// * ESQL: Fix CASE when conditions are multivalued {es-pull}112401[#112401] (issue: {es-issue}112359[#112359]) -// * ESQL: Fix Double operations returning infinite {es-pull}111064[#111064] (issue: {es-issue}111026[#111026]) -// * ESQL: Fix `REVERSE` with backspace character {es-pull}115245[#115245] (issues: {es-issue}114372[#114372], {es-issue}115227[#115227], {es-issue}115228[#115228]) -// * ESQL: Fix a bug in `MV_PERCENTILE` {es-pull}112218[#112218] (issues: {es-issue}112193[#112193], {es-issue}112180[#112180], {es-issue}112187[#112187], {es-issue}112188[#112188]) -// * ESQL: Fix filtered grouping on ords {es-pull}115312[#115312] (issue: {es-issue}114897[#114897]) -// * ESQL: Fix grammar changes around per agg filtering {es-pull}114848[#114848] -// * ESQL: Fix serialization during `can_match` {es-pull}111779[#111779] (issues: {es-issue}111701[#111701], {es-issue}111726[#111726]) -// * ESQL: Fix synthetic attribute pruning {es-pull}111413[#111413] (issue: {es-issue}105821[#105821]) -// * ESQL: don't lose the original casting error message {es-pull}111968[#111968] (issue: 
{es-issue}111967[#111967]) -// * ESQL: fix for missing indices error message {es-pull}111797[#111797] (issue: {es-issue}111712[#111712]) -// * ES|QL: Fix stats by constant expression {es-pull}114899[#114899] -// * ES|QL: Restrict sorting for `_source` and counter field types {es-pull}114638[#114638] (issues: {es-issue}114423[#114423], {es-issue}111976[#111976]) -// * ES|QL: better validation for GROK patterns {es-pull}110574[#110574] (issue: {es-issue}110533[#110533]) -// * ES|QL: better validation for RLIKE patterns {es-pull}112489[#112489] (issue: {es-issue}112485[#112485]) -// * ES|QL: better validation of GROK patterns {es-pull}112200[#112200] (issue: {es-issue}112111[#112111]) -// * Fix ST_CENTROID_AGG when no records are aggregated {es-pull}114888[#114888] (issue: {es-issue}106025[#106025]) -// * Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) -// * Spatial search functions support multi-valued fields in compute engine {es-pull}112063[#112063] (issues: {es-issue}112102[#112102], {es-issue}112505[#112505], {es-issue}110830[#110830]) -// * [ES|QL] Check expression resolved before checking its data type in `ImplicitCasting` {es-pull}113314[#113314] (issue: {es-issue}113242[#113242]) -// * [ES|QL] Simplify patterns for subfields {es-pull}111118[#111118] -// * [ES|QL] Simplify syntax of named parameter for identifier and pattern {es-pull}115061[#115061] -// * [ES|QL] Skip validating remote cluster index names in parser {es-pull}114271[#114271] -// * [ES|QL] Use `RangeQuery` and String in `BinaryComparison` on datetime fields {es-pull}110669[#110669] (issue: {es-issue}107900[#107900]) -// * [ES|QL] add tests for stats by constant {es-pull}110593[#110593] (issue: {es-issue}105383[#105383]) -// * [ES|QL] make named parameter for identifier and pattern snapshot {es-pull}114784[#114784] -// * [ES|QL] validate `mv_sort` order {es-pull}110021[#110021] (issue: {es-issue}109910[#109910]) -// -// Geo:: -// * Fix cases of collections with one point {es-pull}111193[#111193] (issue: {es-issue}110982[#110982]) -// -// Health:: -// * Set `replica_unassigned_buffer_time` in constructor {es-pull}112612[#112612] -// -// ILM+SLM:: -// * Make `SnapshotLifecycleStats` immutable so `SnapshotLifecycleMetadata.EMPTY` isn't changed as side-effect {es-pull}111215[#111215] -// -// Indices APIs:: -// * Revert "Add `ResolvedExpression` wrapper" {es-pull}115317[#115317] -// -// Infra/Core:: -// * Fix max file size check to use `getMaxFileSize` {es-pull}113723[#113723] (issue: {es-issue}113705[#113705]) -// * Guard blob store local directory creation with `doPrivileged` {es-pull}115459[#115459] -// * Handle `BigInteger` in xcontent copy {es-pull}111937[#111937] (issue: {es-issue}111812[#111812]) -// * Report JVM stats for all memory pools (97046) {es-pull}115117[#115117] (issue: {es-issue}97046[#97046]) -// * `ByteArrayStreamInput:` Return -1 when there are no more bytes to read {es-pull}112214[#112214] -// -// Infra/Logging:: -// * Only emit product origin in deprecation log if present {es-pull}111683[#111683] (issue: {es-issue}81757[#81757]) -// -// Infra/Metrics:: -// * Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] -// -// Infra/REST API:: -// * Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. 
{es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) -// -// Infra/Settings:: -// * GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml {es-pull}110816[#110816] (issue: {es-issue}110815[#110815]) -// -// Ingest Node:: -// * Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) -// * Fix IPinfo geolocation schema {es-pull}115147[#115147] -// * Fix `getDatabaseType` for unusual MMDBs {es-pull}112888[#112888] -// * Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] -// -// License:: -// * Fix Start Trial API output acknowledgement header for features {es-pull}111740[#111740] (issue: {es-issue}111739[#111739]) -// * Fix `TokenService` always appearing used in Feature Usage {es-pull}112263[#112263] (issue: {es-issue}61956[#61956]) -// -// Logs:: -// * Do not expand dots when storing objects in ignored source {es-pull}113910[#113910] -// * Fix `ignore_above` handling in synthetic source when index level setting is used {es-pull}113570[#113570] (issue: {es-issue}113538[#113538]) -// * Fix synthetic source for flattened field when used with `ignore_above` {es-pull}113499[#113499] (issue: {es-issue}112044[#112044]) -// -// Machine Learning:: -// * Avoid `ModelAssignment` deadlock {es-pull}109684[#109684] -// * Fix NPE in Get Deployment Stats {es-pull}115404[#115404] -// * Fix bug in ML serverless autoscaling which prevented trained model updates from triggering a scale up {es-pull}110734[#110734] -// * Ignore unrecognized openai sse fields {es-pull}114715[#114715] -// * Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) -// * Prevent NPE if model assignment is removed while waiting to start {es-pull}115430[#115430] -// * Send mid-stream errors to users {es-pull}114549[#114549] -// * Temporarily return both `modelId` and `inferenceId` for GET /_inference until we migrate clients to only `inferenceId` {es-pull}111490[#111490] -// * Warn for model load failures if they have a status code <500 {es-pull}113280[#113280] -// * [Inference API] Remove unused Cohere rerank service settings fields in a BWC way {es-pull}110427[#110427] -// * [ML] Create Inference API will no longer return model_id and now only return inference_id {es-pull}112508[#112508] -// -// Mapping:: -// * Fix `MapperBuilderContext#isDataStream` when used in dynamic mappers {es-pull}110554[#110554] -// * Fix synthetic source field names for multi-fields {es-pull}112850[#112850] -// * Retrieve the source for objects and arrays in a separate parsing phase {es-pull}113027[#113027] (issue: {es-issue}112374[#112374]) -// * Two empty mappings now are created equally {es-pull}107936[#107936] (issue: {es-issue}107031[#107031]) -// -// Ranking:: -// * Fix MLTQuery handling of custom term frequencies {es-pull}110846[#110846] -// * Fix RRF validation for `rank_constant` < 1 {es-pull}112058[#112058] -// * Fix score count validation in reranker response {es-pull}111212[#111212] (issue: {es-issue}111202[#111202]) -// -// Search:: -// * Allow for querries on `_tier` to skip shards in the `can_match` phase {es-pull}114990[#114990] (issue: {es-issue}114910[#114910]) -// * Allow out of range term queries for numeric types {es-pull}112916[#112916] -// * Do not exclude empty arrays or empty objects in source filtering {es-pull}112250[#112250] 
(issue: {es-issue}109668[#109668]) -// * Fix synthetic source handling for `bit` type in `dense_vector` field {es-pull}114407[#114407] (issue: {es-issue}114402[#114402]) -// * Improve DateTime error handling and add some bad date tests {es-pull}112723[#112723] (issue: {es-issue}112190[#112190]) -// * Improve date expression/remote handling in index names {es-pull}112405[#112405] (issue: {es-issue}112243[#112243]) -// * Make "too many clauses" throw IllegalArgumentException to avoid 500s {es-pull}112678[#112678] (issue: {es-issue}112177[#112177]) -// * Make empty string searches be consistent with case (in)sensitivity {es-pull}110833[#110833] -// * Prevent flattening of ordered and unordered interval sources {es-pull}114234[#114234] -// * Remove needless forking to GENERIC in `TransportMultiSearchAction` {es-pull}110796[#110796] -// * Search/Mapping: KnnVectorQueryBuilder support for allowUnmappedFields {es-pull}107047[#107047] (issue: {es-issue}106846[#106846]) -// * Span term query to convert to match no docs when unmapped field is targeted {es-pull}113251[#113251] -// * Speedup `CanMatchPreFilterSearchPhase` constructor {es-pull}110860[#110860] -// * Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) -// -// Security:: -// * Updated the transport CA name in Security Auto-Configuration. {es-pull}106520[#106520] (issue: {es-issue}106455[#106455]) -// -// Snapshot/Restore:: -// * Retry throttled snapshot deletions {es-pull}113237[#113237] -// -// TSDB:: -// * Implement `parseBytesRef` for `TimeSeriesRoutingHashFieldType` {es-pull}113373[#113373] (issue: {es-issue}112399[#112399]) -// -// Task Management:: -// * Improve handling of failure to create persistent task {es-pull}114386[#114386] -// -// Transform:: -// * Allow task canceling of validate API calls {es-pull}110951[#110951] -// * Include reason when no nodes are found {es-pull}112409[#112409] (issue: {es-issue}112404[#112404]) -// -// Vector Search:: -// * Fix dim validation for bit `element_type` {es-pull}114533[#114533] -// * Support semantic_text in object fields {es-pull}114601[#114601] (issue: {es-issue}114401[#114401]) -// -// Watcher:: -// * Truncating watcher history if it is too large {es-pull}111245[#111245] (issue: {es-issue}94745[#94745]) -// * Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) -// -// [[deprecation-9.0.0]] -// [float] -// === Deprecations -// -// Analysis:: -// * Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 {es-pull}113143[#113143] -// * deprecate `edge_ngram` side parameter {es-pull}110829[#110829] -// -// CRUD:: -// * Deprecate dot-prefixed indices and composable template index patterns {es-pull}112571[#112571] -// -// Machine Learning:: -// * [Inference API] Deprecate elser service {es-pull}113216[#113216] -// -// Search:: -// * Adding deprecation warnings for rrf using rank and `sub_searches` {es-pull}114854[#114854] -// * Deprecate legacy params from range query {es-pull}113286[#113286] -// -// [[enhancement-9.0.0]] -// [float] -// === Enhancements -// -// Aggregations:: -// * Account for `DelayedBucket` before reduction {es-pull}113013[#113013] -// * Add protection for OOM during aggregations partial reduction {es-pull}110520[#110520] -// * Deduplicate `BucketOrder` when deserializing {es-pull}112707[#112707] -// * Lower the memory footprint when creating `DelayedBucket` {es-pull}112519[#112519] -// * Reduce heap usage for 
`AggregatorsReducer` {es-pull}112874[#112874] -// * Remove reduce and `reduceContext` from `DelayedBucket` {es-pull}112547[#112547] -// -// Allocation:: -// * Add link to flood-stage watermark exception message {es-pull}111315[#111315] -// * Always allow rebalancing by default {es-pull}111015[#111015] -// * Only publish desired balance gauges on master {es-pull}115383[#115383] -// -// Application:: -// * [Profiling] add `container.id` field to event index template {es-pull}111969[#111969] -// -// Authorization:: -// * Add manage roles privilege {es-pull}110633[#110633] -// * Add privileges required for CDR misconfiguration features to work on AWS SecurityHub integration {es-pull}112574[#112574] -// * [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] -// -// CRUD:: -// * Suppress merge-on-recovery for older indices {es-pull}113462[#113462] -// -// Codec:: -// * Remove zstd feature flag for index codec best compression {es-pull}112665[#112665] -// -// Data streams:: -// * Add 'verbose' flag retrieving `maximum_timestamp` for get data stream API {es-pull}112303[#112303] -// * Display effective retention in the relevant data stream APIs {es-pull}112019[#112019] -// * Expose global retention settings via data stream lifecycle API {es-pull}112210[#112210] -// * Make ecs@mappings work with OTel attributes {es-pull}111600[#111600] -// -// Distributed:: -// * Add link to Max Shards Per Node exception message {es-pull}110993[#110993] -// * Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] -// -// EQL:: -// * ESQL: Delay construction of warnings {es-pull}114368[#114368] -// -// ES|QL:: -// * Add EXP ES|QL function {es-pull}110879[#110879] -// * Add `CircuitBreaker` to TDigest, Step 3: Connect with ESQL CB {es-pull}113387[#113387] -// * Add `CircuitBreaker` to TDigest, Step 4: Take into account shallow classes size {es-pull}113613[#113613] (issue: {es-issue}113916[#113916]) -// * Collect and display execution metadata for ES|QL cross cluster searches {es-pull}112595[#112595] (issue: {es-issue}112402[#112402]) -// * ESQL: Add support for multivalue fields in Arrow output {es-pull}114774[#114774] -// * ESQL: BUCKET: allow numerical spans as whole numbers {es-pull}111874[#111874] (issues: {es-issue}104646[#104646], {es-issue}109340[#109340], {es-issue}105375[#105375]) -// * ESQL: Have BUCKET generate friendlier intervals {es-pull}111879[#111879] (issue: {es-issue}110916[#110916]) -// * ESQL: Profile more timing information {es-pull}111855[#111855] -// * ESQL: Push down filters even in case of renames in Evals {es-pull}114411[#114411] -// * ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] -// * ESQL: Speed up CASE for some parameters {es-pull}112295[#112295] -// * ESQL: Speed up grouping by bytes {es-pull}114021[#114021] -// * ESQL: Support INLINESTATS grouped on expressions {es-pull}111690[#111690] -// * ESQL: Use less memory in listener {es-pull}114358[#114358] -// * ES|QL: Add support for cached strings in plan serialization {es-pull}112929[#112929] -// * ES|QL: add Telemetry API and track top functions {es-pull}111226[#111226] -// * ES|QL: add metrics for functions {es-pull}114620[#114620] -// * Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function {es-pull}112938[#112938] (issue: {es-issue}109973[#109973]) -// * Siem ea 9521 improve test {es-pull}111552[#111552] -// * Support multi-valued fields in compute engine for ST_DISTANCE 
{es-pull}114836[#114836] (issue: {es-issue}112910[#112910]) -// * [ESQL] Add `SPACE` function {es-pull}112350[#112350] -// * [ESQL] Add finish() elapsed time to aggregation profiling times {es-pull}113172[#113172] (issue: {es-issue}112950[#112950]) -// * [ESQL] Make query wrapped by `SingleValueQuery` cacheable {es-pull}110116[#110116] -// * [ES|QL] Add hypot function {es-pull}114382[#114382] -// * [ES|QL] Cast mixed numeric types to a common numeric type for Coalesce and In at Analyzer {es-pull}111917[#111917] (issue: {es-issue}111486[#111486]) -// * [ES|QL] Combine Disjunctive CIDRMatch {es-pull}111501[#111501] (issue: {es-issue}105143[#105143]) -// * [ES|QL] Create `Range` in `PushFiltersToSource` for qualified pushable filters on the same field {es-pull}111437[#111437] -// * [ES|QL] Name parameter with leading underscore {es-pull}111950[#111950] (issue: {es-issue}111821[#111821]) -// * [ES|QL] Named parameter for field names and field name patterns {es-pull}112905[#112905] -// * [ES|QL] Validate index name in parser {es-pull}112081[#112081] -// * [ES|QL] add reverse function {es-pull}113297[#113297] -// * [ES|QL] explicit cast a string literal to `date_period` and `time_duration` in arithmetic operations {es-pull}109193[#109193] -// -// Experiences:: -// * Integrate IBM watsonx to Inference API for text embeddings {es-pull}111770[#111770] -// -// Geo:: -// * Add support for spatial relationships in point field mapper {es-pull}112126[#112126] -// * Small performance improvement in h3 library {es-pull}113385[#113385] -// * Support docvalues only query in shape field {es-pull}112199[#112199] -// -// Health:: -// * (API) Cluster Health report `unassigned_primary_shards` {es-pull}112024[#112024] -// * Do not treat replica as unassigned if primary recently created and unassigned time is below a threshold {es-pull}112066[#112066] -// * Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] -// -// ILM+SLM:: -// * ILM: Add `total_shards_per_node` setting to searchable snapshot {es-pull}112972[#112972] (issue: {es-issue}112261[#112261]) -// * PUT slm policy should only increase version if actually changed {es-pull}111079[#111079] -// * Preserve Step Info Across ILM Auto Retries {es-pull}113187[#113187] -// * Register SLM run before snapshotting to save stats {es-pull}110216[#110216] -// * SLM interval schedule followup - add back `getFieldName` style getters {es-pull}112123[#112123] -// -// Infra/Circuit Breakers:: -// * Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] -// -// Infra/Core:: -// * Add nanos support to `ZonedDateTime` serialization {es-pull}111689[#111689] (issue: {es-issue}68292[#68292]) -// * Extend logging for dropped warning headers {es-pull}111624[#111624] (issue: {es-issue}90527[#90527]) -// * Give the kibana system user permission to read security entities {es-pull}114363[#114363] -// -// Infra/Metrics:: -// * Add `TaskManager` to `pluginServices` {es-pull}112687[#112687] -// * Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] -// -// Infra/REST API:: -// * Optimize the loop processing of URL decoding {es-pull}110237[#110237] (issue: {es-issue}110235[#110235]) -// -// Infra/Scripting:: -// * Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] -// * Expose `HexFormat` in Painless {es-pull}112412[#112412] -// -// Infra/Settings:: -// * Improve exception message for bad environment variable 
placeholders in settings {es-pull}114552[#114552] (issue: {es-issue}110858[#110858]) -// * Reprocess operator file settings when settings service starts, due to node restart or master node change {es-pull}114295[#114295] -// -// Ingest Node:: -// * Add `size_in_bytes` to enrich cache stats {es-pull}110578[#110578] -// * Add support for templates when validating mappings in the simulate ingest API {es-pull}111161[#111161] -// * Adding `index_template_substitutions` to the simulate ingest API {es-pull}114128[#114128] -// * Adding component template substitutions to the simulate ingest API {es-pull}113276[#113276] -// * Adding mapping validation to the simulate ingest API {es-pull}110606[#110606] -// * Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] -// * Adding support for simulate ingest mapping adddition for indices with mappings that do not come from templates {es-pull}115359[#115359] -// * Adds example plugin for custom ingest processor {es-pull}112282[#112282] (issue: {es-issue}111539[#111539]) -// * Fix unnecessary mustache template evaluation {es-pull}110986[#110986] (issue: {es-issue}110191[#110191]) -// * Listing all available databases in the _ingest/geoip/database API {es-pull}113498[#113498] -// * Make enrich cache based on memory usage {es-pull}111412[#111412] (issue: {es-issue}106081[#106081]) -// * Tag redacted document in ingest metadata {es-pull}113552[#113552] -// * Verify Maxmind database types in the geoip processor {es-pull}114527[#114527] -// -// Logs:: -// * Add validation for synthetic source mode in logs mode indices {es-pull}110677[#110677] -// * Store original source for keywords using a normalizer {es-pull}112151[#112151] -// -// Machine Learning:: -// * Add Completion Inference API for Alibaba Cloud AI Search Model {es-pull}112512[#112512] -// * Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] -// * Add Streaming Inference spec {es-pull}113812[#113812] -// * Add chunking settings configuration to `CohereService,` `AmazonBedrockService,` and `AzureOpenAiService` {es-pull}113897[#113897] -// * Add chunking settings configuration to `ElasticsearchService/ELSER` {es-pull}114429[#114429] -// * Add custom rule parameters to force time shift {es-pull}110974[#110974] -// * Adding chunking settings to `GoogleVertexAiService,` `AzureAiStudioService,` and `AlibabaCloudSearchService` {es-pull}113981[#113981] -// * Adding chunking settings to `MistralService,` `GoogleAiStudioService,` and `HuggingFaceService` {es-pull}113623[#113623] -// * Adds a new Inference API for streaming responses back to the user. 
{es-pull}113158[#113158] -// * Create `StreamingHttpResultPublisher` {es-pull}112026[#112026] -// * Create an ml node inference endpoint referencing an existing model {es-pull}114750[#114750] -// * Default inference endpoint for ELSER {es-pull}113873[#113873] -// * Default inference endpoint for the multilingual-e5-small model {es-pull}114683[#114683] -// * Enable OpenAI Streaming {es-pull}113911[#113911] -// * Filter empty task settings objects from the API response {es-pull}114389[#114389] -// * Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] -// * Migrate Inference to `ChunkedToXContent` {es-pull}111655[#111655] -// * Register Task while Streaming {es-pull}112369[#112369] -// * Server-Sent Events for Inference response {es-pull}112565[#112565] -// * Stream Anthropic Completion {es-pull}114321[#114321] -// * Stream Azure Completion {es-pull}114464[#114464] -// * Stream Bedrock Completion {es-pull}114732[#114732] -// * Stream Cohere Completion {es-pull}114080[#114080] -// * Stream Google Completion {es-pull}114596[#114596] -// * Stream OpenAI Completion {es-pull}112677[#112677] -// * Support sparse embedding models in the elasticsearch inference service {es-pull}112270[#112270] -// * Switch default chunking strategy to sentence {es-pull}114453[#114453] -// * Upgrade to AWS SDK v2 {es-pull}114309[#114309] (issue: {es-issue}110590[#110590]) -// * Use the same chunking configurations for models in the Elasticsearch service {es-pull}111336[#111336] -// * Validate streaming HTTP Response {es-pull}112481[#112481] -// * Wait for allocation on scale up {es-pull}114719[#114719] -// * [Inference API] Add Alibaba Cloud AI Search Model support to Inference API {es-pull}111181[#111181] -// * [Inference API] Add Docs for AlibabaCloud AI Search Support for the Inference API {es-pull}111181[#111181] -// * [Inference API] Introduce Update API to change some aspects of existing inference endpoints {es-pull}114457[#114457] -// * [Inference API] Prevent inference endpoints from being deleted if they are referenced by semantic text {es-pull}110399[#110399] -// * [Inference API] alibabacloud ai search service support chunk infer to support semantic_text field {es-pull}110399[#110399] -// -// Mapping:: -// * Add Field caps support for Semantic Text {es-pull}111809[#111809] -// * Add Lucene segment-level fields stats {es-pull}111123[#111123] -// * Add Search Inference ID To Semantic Text Mapping {es-pull}113051[#113051] -// * Add object param for keeping synthetic source {es-pull}113690[#113690] -// * Add support for multi-value dimensions {es-pull}112645[#112645] (issue: {es-issue}110387[#110387]) -// * Allow dimension fields to have multiple values in standard and logsdb index mode {es-pull}112345[#112345] (issues: {es-issue}112232[#112232], {es-issue}112239[#112239]) -// * Allow fields with dots in sparse vector field mapper {es-pull}111981[#111981] (issue: {es-issue}109118[#109118]) -// * Allow querying `index_mode` {es-pull}110676[#110676] -// * Configure keeping source in `FieldMapper` {es-pull}112706[#112706] -// * Control storing array source with index setting {es-pull}112397[#112397] -// * Introduce mode `subobjects=auto` for objects {es-pull}110524[#110524] -// * Update `semantic_text` field to support indexing numeric and boolean data types {es-pull}111284[#111284] -// * Use ELSER By Default For Semantic Text {es-pull}113563[#113563] -// * Use fallback synthetic source for `copy_to` and doc_values: false cases {es-pull}112294[#112294] 
(issues: {es-issue}110753[#110753], {es-issue}110038[#110038], {es-issue}109546[#109546]) -// -// Network:: -// * Add links to network disconnect troubleshooting {es-pull}112330[#112330] -// -// Ranking:: -// * Add timeout and cancellation check to rescore phase {es-pull}115048[#115048] -// -// Recovery:: -// * Trigger merges after recovery {es-pull}113102[#113102] -// -// Relevance:: -// * Add a query rules tester API call {es-pull}114168[#114168] -// -// Search:: -// * Add initial support for `semantic_text` field type {es-pull}113920[#113920] -// * Add more `dense_vector` details for cluster stats field stats {es-pull}113607[#113607] -// * Add range and regexp Intervals {es-pull}111465[#111465] -// * Adding support for `allow_partial_search_results` in PIT {es-pull}111516[#111516] -// * Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin` {es-pull}112933[#112933] -// * Avoid using concurrent collector manager in `LuceneChangesSnapshot` {es-pull}113816[#113816] -// * Bool query early termination should also consider `must_not` clauses {es-pull}115031[#115031] -// * Deduplicate Kuromoji User Dictionary {es-pull}112768[#112768] -// * Multi term intervals: increase max_expansions {es-pull}112826[#112826] (issue: {es-issue}110491[#110491]) -// * Search coordinator uses `event.ingested` in cluster state to do rewrites {es-pull}111523[#111523] -// * Update cluster stats for retrievers {es-pull}114109[#114109] -// -// Security:: -// * (logger) change from error to warn for short circuiting user {es-pull}112895[#112895] -// * Add asset criticality indices for `kibana_system_user` {es-pull}113588[#113588] -// * Add tier preference to security index settings allowlist {es-pull}111818[#111818] -// * [Service Account] Add `AutoOps` account {es-pull}111316[#111316] -// -// Snapshot/Restore:: -// * Add `max_multipart_parts` setting to S3 repository {es-pull}113989[#113989] -// * Add support for Azure Managed Identity {es-pull}111344[#111344] -// * Add telemetry for repository usage {es-pull}112133[#112133] -// * Add workaround for missing shard gen blob {es-pull}112337[#112337] -// * Clean up dangling S3 multipart uploads {es-pull}111955[#111955] (issues: {es-issue}101169[#101169], {es-issue}44971[#44971]) -// * Execute shard snapshot tasks in shard-id order {es-pull}111576[#111576] (issue: {es-issue}108739[#108739]) -// * Include account name in Azure settings exceptions {es-pull}111274[#111274] -// * Introduce repository integrity verification API {es-pull}112348[#112348] (issue: {es-issue}52622[#52622]) -// * Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] -// * Track shard snapshot progress during node shutdown {es-pull}112567[#112567] -// -// Stats:: -// * Track search and fetch failure stats {es-pull}113988[#113988] -// -// TSDB:: -// * Add support for boolean dimensions {es-pull}111457[#111457] (issue: {es-issue}111338[#111338]) -// * Stop iterating over all fields to extract @timestamp value {es-pull}110603[#110603] (issue: {es-issue}92297[#92297]) -// * Support booleans in routing path {es-pull}111445[#111445] -// -// Vector Search:: -// * Dense vector field types updatable for int4 {es-pull}110928[#110928] -// * Use native scalar scorer for int8_flat index {es-pull}111071[#111071] -// -// [[feature-9.0.0]] -// [float] -// === New features -// -// Data streams:: -// * Introduce global retention in data stream lifecycle. 
{es-pull}111972[#111972] -// * X-pack/plugin/otel: introduce x-pack-otel plugin {es-pull}111091[#111091] -// -// ES|QL:: -// * Add ESQL match function {es-pull}113374[#113374] -// * ESQL: Add `MV_PSERIES_WEIGHTED_SUM` for score calculations used by security solution {es-pull}109017[#109017] -// * ESQL: Add async ID and `is_running` headers to ESQL async query {es-pull}111840[#111840] -// * ESQL: Add boolean support to Max and Min aggs {es-pull}110527[#110527] -// * ESQL: Add boolean support to TOP aggregation {es-pull}110718[#110718] -// * ESQL: Added `mv_percentile` function {es-pull}111749[#111749] (issue: {es-issue}111591[#111591]) -// * ESQL: INLINESTATS {es-pull}109583[#109583] (issue: {es-issue}107589[#107589]) -// * ESQL: Introduce per agg filter {es-pull}113735[#113735] -// * ESQL: Strings support for MAX and MIN aggregations {es-pull}111544[#111544] -// * ESQL: Support IP fields in MAX and MIN aggregations {es-pull}110921[#110921] -// * ESQL: TOP aggregation IP support {es-pull}111105[#111105] -// * ESQL: TOP support for strings {es-pull}113183[#113183] (issue: {es-issue}109849[#109849]) -// * ESQL: `mv_median_absolute_deviation` function {es-pull}112055[#112055] (issue: {es-issue}111590[#111590]) -// * Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] -// * Search in ES|QL: Add MATCH operator {es-pull}110971[#110971] -// -// ILM+SLM:: -// * SLM Interval based scheduling {es-pull}110847[#110847] -// -// Inference:: -// * EIS integration {es-pull}111154[#111154] -// -// Ingest Node:: -// * Add a `terminate` ingest processor {es-pull}114157[#114157] (issue: {es-issue}110218[#110218]) -// -// Machine Learning:: -// * Inference autoscaling {es-pull}109667[#109667] -// * Telemetry for inference adaptive allocations {es-pull}110630[#110630] -// -// Relevance:: -// * [Query rules] Add `exclude` query rule type {es-pull}111420[#111420] -// -// Search:: -// * Async search: Add ID and "is running" http headers {es-pull}112431[#112431] (issue: {es-issue}109576[#109576]) -// * Cross-cluster search telemetry {es-pull}113825[#113825] -// -// Vector Search:: -// * Adding new bbq index types behind a feature flag {es-pull}114439[#114439] + +Aggregations:: +* Handle with `illegalArgumentExceptions` negative values in HDR percentile aggregations {es-pull}116174[#116174] (issue: {es-issue}115777[#115777]) + +Analysis:: +* Adjust analyze limit exception to be a `bad_request` {es-pull}116325[#116325] + +CCS:: +* Fix long metric deserialize & add - auto-resize needs to be set manually {es-pull}117105[#117105] (issue: {es-issue}116914[#116914]) + +CRUD:: +* Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] +* Standardize error code when bulk body is invalid {es-pull}114869[#114869] + +Data streams:: +* Add missing header in `put_data_lifecycle` rest-api-spec {es-pull}116292[#116292] + +EQL:: +* Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) + +ES|QL:: +* Added stricter range type checks and runtime warnings for ENRICH {es-pull}115091[#115091] (issues: {es-issue}107357[#107357], {es-issue}116799[#116799]) +* Don't return TEXT type for functions that take TEXT {es-pull}114334[#114334] (issues: {es-issue}111537[#111537], {es-issue}114333[#114333]) +* ESQL: Fix sorts containing `_source` {es-pull}116980[#116980] (issue: {es-issue}116659[#116659]) +* ESQL: fix the column position in errors {es-pull}117153[#117153] +* ES|QL: Fix stats by constant 
expression {es-pull}114899[#114899] +* Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions {es-pull}116583[#116583] (issues: {es-issue}116529[#116529], {es-issue}116544[#116544]) +* Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) +* Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` {es-pull}114665[#114665] (issue: {es-issue}105095[#105095]) +* Use `SearchStats` instead of field.isAggregatable in data node planning {es-pull}115744[#115744] (issue: {es-issue}115737[#115737]) +* [ESQL] Fix Binary Comparisons on Date Nanos {es-pull}116346[#116346] +* [ES|QL] To_DatePeriod and To_TimeDuration return better error messages on `union_type` fields {es-pull}114934[#114934] + +Infra/CLI:: +* Fix NPE on plugin sync {es-pull}115640[#115640] (issue: {es-issue}114818[#114818]) + +Infra/Metrics:: +* Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] + +Infra/REST API:: +* Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. {es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) + +Infra/Settings:: +* Don't allow secure settings in YML config (109115) {es-pull}115779[#115779] (issue: {es-issue}109115[#109115]) + +Ingest Node:: +* Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) +* Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] + +Logs:: +* Always check if index mode is logsdb {es-pull}116922[#116922] +* Prohibit changes to index mode, source, and sort settings during resize {es-pull}115812[#115812] + +Machine Learning:: +* Fix bug in ML autoscaling when some node info is unavailable {es-pull}116650[#116650] +* Fix deberta tokenizer bug caused by bug in normalizer {es-pull}117189[#117189] +* Hides `hugging_face_elser` service from the `GET _inference/_services API` {es-pull}116664[#116664] (issue: {es-issue}116644[#116644]) +* Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) +* Propagate scoring function through random sampler {es-pull}116957[#116957] (issue: {es-issue}110134[#110134]) +* Update Deberta tokenizer {es-pull}116358[#116358] +* Wait for up to 2 seconds for yellow status before starting search {es-pull}115938[#115938] (issues: {es-issue}107777[#107777], {es-issue}105955[#105955], {es-issue}107815[#107815], {es-issue}112191[#112191]) + +Mapping:: +* Change synthetic source logic for `constant_keyword` {es-pull}117182[#117182] (issue: {es-issue}117083[#117083]) +* Ignore conflicting fields during dynamic mapping update {es-pull}114227[#114227] (issue: {es-issue}114228[#114228]) + +Network:: +* Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` {es-pull}116211[#116211] + +Ranking:: +* Propagating nested `inner_hits` to the parent compound retriever {es-pull}116408[#116408] (issue: {es-issue}116397[#116397]) + +Relevance:: +* Fix handling of bulk requests with semantic text fields and delete ops {es-pull}116942[#116942] + +Search:: +* Catch and handle disconnect exceptions in search {es-pull}115836[#115836] +* Fields caps does not honour ignore_unavailable {es-pull}116021[#116021] (issue: {es-issue}107767[#107767]) +* Fix handling of time exceeded exception in fetch phase {es-pull}116676[#116676] +* Fix leak in `DfsQueryPhase` and introduce search 
disconnect stress test {es-pull}116060[#116060] (issue: {es-issue}115056[#115056]) +* Inconsistency in the _analyzer api when the index is not included {es-pull}115930[#115930] +* Semantic text simple partial update {es-pull}116478[#116478] +* Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) +* Validate missing shards after the coordinator rewrite {es-pull}116382[#116382] +* _validate does not honour ignore_unavailable {es-pull}116656[#116656] (issue: {es-issue}116594[#116594]) + +Snapshot/Restore:: +* Retry throttled snapshot deletions {es-pull}113237[#113237] + +Vector Search:: +* Update Semantic Query To Handle Zero Size Responses {es-pull}116277[#116277] (issue: {es-issue}116083[#116083]) + +Watcher:: +* Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) + +[[deprecation-9.0.0]] +[float] +=== Deprecations + +Ingest Node:: +* Fix `_type` deprecation on simulate pipeline API {es-pull}116259[#116259] + +Machine Learning:: +* [Inference API] Deprecate elser service {es-pull}113216[#113216] + +Mapping:: +* Deprecate `_source.mode` in mappings {es-pull}116689[#116689] + +[[enhancement-9.0.0]] +[float] +=== Enhancements + +Allocation:: +* Only publish desired balance gauges on master {es-pull}115383[#115383] + +Authorization:: +* Add a `monitor_stats` privilege and allow that privilege for remote cluster privileges {es-pull}114964[#114964] +* [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] + +CRUD:: +* Suppress merge-on-recovery for older indices {es-pull}113462[#113462] + +Data streams:: +* Adding a deprecation info API warning for data streams with old indices {es-pull}116447[#116447] +* Apm-data: disable date_detection for all apm data streams {es-pull}116995[#116995] + +Distributed:: +* Metrics for incremental bulk splits {es-pull}116765[#116765] +* Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] + +ES|QL:: +* Add ES|QL `bit_length` function {es-pull}115792[#115792] +* ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time {es-pull}116348[#116348] (issue: {es-issue}114531[#114531]) +* ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] +* ESQL: extract common filter from aggs {es-pull}115678[#115678] +* ESQL: optimise aggregations filtered by false/null into evals {es-pull}115858[#115858] +* ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote clusters {es-pull}115266[#115266] (issue: {es-issue}114531[#114531]) +* ES|QL: add metrics for functions {es-pull}114620[#114620] +* Esql Enable Date Nanos (tech preview) {es-pull}117080[#117080] +* Support partial sort fields in TopN pushdown {es-pull}116043[#116043] (issue: {es-issue}114515[#114515]) +* [ES|QL] Implicit casting string literal to intervals {es-pull}115814[#115814] (issue: {es-issue}115352[#115352]) + +Health:: +* Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] + +Indices APIs:: +* Ensure class resource stream is closed in `ResourceUtils` {es-pull}116437[#116437] + +Inference:: +* Add version prefix to Inference Service API path {es-pull}117095[#117095] + +Infra/Circuit Breakers:: +* Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] + +Infra/Core:: +* Support for unsigned 64 bit numbers in Cpu stats {es-pull}114681[#114681] 
(issue: {es-issue}112274[#112274])
+
+Infra/Metrics::
+* Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425]
+
+Infra/Scripting::
+* Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002]
+
+Ingest Node::
+* Add postal_code support to the City and Enterprise databases {es-pull}114193[#114193]
+* Add support for registered country fields for maxmind geoip databases {es-pull}114521[#114521]
+* Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742]
+* Adding support for simulate ingest mapping addition for indices with mappings that do not come from templates {es-pull}115359[#115359]
+* Support IPinfo database configurations {es-pull}114548[#114548]
+* Support more maxmind fields in the geoip processor {es-pull}114268[#114268]
+
+Logs::
+* Add logsdb telemetry {es-pull}115994[#115994]
+* Add num docs and size to logsdb telemetry {es-pull}116128[#116128]
+* Feature: re-structure document ID generation favoring _id inverted index compression {es-pull}104683[#104683]
+
+Machine Learning::
+* Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852]
+* Add special case for elastic reranker in inference API {es-pull}116962[#116962]
+* Adding inference endpoint validation for `AzureAiStudioService` {es-pull}113713[#113713]
+* Adds support for `input_type` field to Vertex inference service {es-pull}116431[#116431]
+* Enable built-in Inference Endpoints and default for Semantic Text {es-pull}116931[#116931]
+* Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041]
+* Inference duration and error metrics {es-pull}115876[#115876]
+* Remove all mentions of eis and gateway and deprecate flags that do {es-pull}116692[#116692]
+* [Inference API] Add API to get configuration of inference services {es-pull}114862[#114862]
+* [Inference API] Improve chunked results error message {es-pull}115807[#115807]
+
+Network::
+* Allow http unsafe buffers by default {es-pull}116115[#116115]
+
+Recovery::
+* Attempt to clean up index before remote transfer {es-pull}115142[#115142] (issue: {es-issue}104473[#104473])
+* Trigger merges after recovery {es-pull}113102[#113102]
+
+Reindex::
+* Change Reindexing metrics unit from millis to seconds {es-pull}115721[#115721]
+
+Relevance::
+* Add query rules retriever {es-pull}114855[#114855]
+* Add tracking for query rule types {es-pull}116357[#116357]
+
+Search::
+* Add Search Phase APM metrics {es-pull}113194[#113194]
+* Add `docvalue_fields` Support for `dense_vector` Fields {es-pull}114484[#114484] (issue: {es-issue}108470[#108470])
+* Add initial support for `semantic_text` field type {es-pull}113920[#113920]
+* Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter {es-pull}115459[#115459] (issue: {es-issue}97849[#97849])
+* Better sizing `BytesRef` for Strings in Queries {es-pull}115655[#115655]
+* Enable `_tier` based coordinator rewrites for all indices (not just mounted indices) {es-pull}115797[#115797]
+* Only aggregations require at least one shard request {es-pull}115314[#115314]
+
+Security::
+* Add refresh `.security` index call between security migrations {es-pull}114879[#114879]
+
+Snapshot/Restore::
+* Improve message about insecure S3 settings {es-pull}116915[#116915]
+* Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813]
+* Split searchable snapshot into multiple repo operations {es-pull}116918[#116918]
+* Track shard snapshot progress during node shutdown {es-pull}112567[#112567] + +Vector Search:: +* Add support for bitwise inner-product in painless {es-pull}116082[#116082] + +[[feature-9.0.0]] +[float] +=== New features + +Data streams:: +* Add default ILM policies and switch to ILM for apm-data plugin {es-pull}115687[#115687] + +ES|QL:: +* Add support for `BYTE_LENGTH` scalar function {es-pull}116591[#116591] +* Esql/lookup join grammar {es-pull}116515[#116515] +* Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] + +Search:: +* ESQL - Add match operator (:) {es-pull}116819[#116819] [[upgrade-9.0.0]] [float] === Upgrades -// -// Infra/Core:: -// * Upgrade xcontent to Jackson 2.17.0 {es-pull}111948[#111948] -// * Upgrade xcontent to Jackson 2.17.2 {es-pull}112320[#112320] -// -// Infra/Metrics:: -// * Update APM Java Agent to support JDK 23 {es-pull}115194[#115194] (issues: {es-issue}115101[#115101], {es-issue}115100[#115100]) -// -// Search:: -// * Upgrade to Lucene 10 {es-pull}114741[#114741] -// * Upgrade to Lucene 9.12 {es-pull}113333[#113333] -// -// Snapshot/Restore:: -// * Upgrade Azure SDK {es-pull}111225[#111225] -// * Upgrade `repository-azure` dependencies {es-pull}112277[#112277] + +Search:: +* Upgrade to Lucene 10 {es-pull}114741[#114741] diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index edecd4f727583..b87081639c684 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-highlights]] == What's new in {minor-version} @@ -12,163 +9,14 @@ For detailed information about this release, see the <> and <>. endif::[] -// -// // tag::notable-highlights[] -// -// [discrete] -// [[esql_inlinestats]] -// === ESQL: INLINESTATS -// This adds the `INLINESTATS` command to ESQL which performs a STATS and -// then enriches the results into the output stream. So, this query: -// -// [source,esql] -// ---- -// FROM test -// | INLINESTATS m=MAX(a * b) BY b -// | WHERE m == a * b -// | SORT a DESC, b DESC -// | LIMIT 3 -// ---- -// -// Produces output like: -// -// | a | b | m | -// | --- | --- | ----- | -// | 99 | 999 | 98901 | -// | 99 | 998 | 98802 | -// | 99 | 997 | 98703 | -// -// {es-pull}109583[#109583] -// -// [discrete] -// [[always_allow_rebalancing_by_default]] -// === Always allow rebalancing by default -// In earlier versions of {es} the `cluster.routing.allocation.allow_rebalance` setting defaults to -// `indices_all_active` which blocks all rebalancing moves while the cluster is in `yellow` or `red` health. This was -// appropriate for the legacy allocator which might do too many rebalancing moves otherwise. Today's allocator has -// better support for rebalancing a cluster that is not in `green` health, and expects to be able to rebalance some -// shards away from over-full nodes to avoid allocating shards to undesirable locations in the first place. From -// version 8.16 `allow_rebalance` setting defaults to `always` unless the legacy allocator is explicitly enabled. 
-// -// {es-pull}111015[#111015] -// -// [discrete] -// [[add_global_retention_in_data_stream_lifecycle]] -// === Add global retention in data stream lifecycle -// Data stream lifecycle now supports configuring retention on a cluster level, -// namely global retention. Global retention \nallows us to configure two different -// retentions: -// -// - `data_streams.lifecycle.retention.default` is applied to all data streams managed -// by the data stream lifecycle that do not have retention defined on the data stream level. -// - `data_streams.lifecycle.retention.max` is applied to all data streams managed by the -// data stream lifecycle and it allows any data stream \ndata to be deleted after the `max_retention` has passed. -// -// {es-pull}111972[#111972] -// -// [discrete] -// [[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]] -// === Enable ZStandard compression for indices with index.codec set to best_compression -// Before DEFLATE compression was used to compress stored fields in indices with index.codec index setting set to -// best_compression, with this change ZStandard is used as compression algorithm to stored fields for indices with -// index.codec index setting set to best_compression. The usage ZStandard results in less storage usage with a -// similar indexing throughput depending on what options are used. Experiments with indexing logs have shown that -// ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput compared to DEFLATE. -// -// {es-pull}112665[#112665] -// -// [discrete] -// [[esql_introduce_per_agg_filter]] -// === ESQL: Introduce per agg filter -// Add support for aggregation scoped filters that work dynamically on the -// data in each group. -// -// [source,esql] -// ---- -// | STATS success = COUNT(*) WHERE 200 <= code AND code < 300, -// redirect = COUNT(*) WHERE 300 <= code AND code < 400, -// client_err = COUNT(*) WHERE 400 <= code AND code < 500, -// server_err = COUNT(*) WHERE 500 <= code AND code < 600, -// total_count = COUNT(*) -// ---- -// -// Implementation wise, the base AggregateFunction has been extended to -// allow a filter to be passed on. This is required to incorporate the -// filter as part of the aggregate equality/identity which would fail with -// the filter as an external component. -// As part of the process, the serialization for the existing aggregations -// had to be fixed so AggregateFunction implementations so that it -// delegates to their parent first. -// -// {es-pull}113735[#113735] -// -// // end::notable-highlights[] -// -// -// [discrete] -// [[esql_multi_value_fields_supported_in_geospatial_predicates]] -// === ESQL: Multi-value fields supported in Geospatial predicates -// Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY` -// of the values in the field should pass the predicate. -// For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not? -// This ambiguity does not exist with the spatial predicates -// `ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL` -// is implied by the predicate itself. -// Consider a predicate checking a field named `location` against a test geometry named `shape`: -// -// * `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape -// * `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape -// -// This works even if the shape argument is itself a complex or compound geometry. 
-// -// Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved -// with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained -// geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value -// field is first combined into a single geometry before performing the predicate check. -// -// * `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape -// * `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape -// -// {es-pull}112063[#112063] -// -// [discrete] -// [[enhance_sort_push_down_to_lucene_to_cover_references_to_fields_st_distance_function]] -// === Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function -// The most used and likely most valuable geospatial search query in Elasticsearch is the sorted proximity search, -// finding items within a certain distance of a point of interest and sorting the results by distance. -// This has been possible in ES|QL since 8.15.0, but the sorting was done in-memory, not pushed down to Lucene. -// Now the sorting is pushed down to Lucene, which results in a significant performance improvement. -// -// Queries that perform both filtering and sorting on distance are supported. For example: -// -// [source,esql] -// ---- -// FROM test -// | EVAL distance = ST_DISTANCE(location, TO_GEOPOINT("POINT(37.7749, -122.4194)")) -// | WHERE distance < 1000000 -// | SORT distance ASC, name DESC -// | LIMIT 10 -// ---- -// -// In addition, the support for sorting on EVAL expressions has been extended to cover references to fields: -// -// [source,esql] -// ---- -// FROM test -// | EVAL ref = field -// | SORT ref ASC -// | LIMIT 10 -// ---- -// -// {es-pull}112938[#112938] -// + +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: +// tag::notable-highlights[] // [discrete] -// [[cross_cluster_search_telemetry]] -// === Cross-cluster search telemetry -// The cross-cluster search telemetry is collected when cross-cluster searches -// are performed, and is returned as "ccs" field in `_cluster/stats` output. -// It also add a new parameter `include_remotes=true` to the `_cluster/stats` API -// which will collect data from connected remote clusters. +// === Heading // -// {es-pull}113825[#113825] +// Description. 
+// end::notable-highlights[] + From 573b8a9252e55e24c1c34e9e734b37aafd077e83 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Thu, 21 Nov 2024 14:47:24 -0500 Subject: [PATCH 149/386] Adding chunking settings to IbmWatsonxService (#114914) * Adding chunking settings to IbmWatsonxService * Removing feature flag * Update docs/changelog/114914.yaml --------- Co-authored-by: Elastic Machine --- docs/changelog/114914.yaml | 5 + .../ibmwatsonx/IbmWatsonxService.java | 29 ++- .../embeddings/IbmWatsonxEmbeddingsModel.java | 6 +- .../ibmwatsonx/IbmWatsonxServiceTests.java | 173 +++++++++++++++++- .../IbmWatsonxEmbeddingsModelTests.java | 1 + 5 files changed, 211 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/114914.yaml diff --git a/docs/changelog/114914.yaml b/docs/changelog/114914.yaml new file mode 100644 index 0000000000000..bad13e26682dc --- /dev/null +++ b/docs/changelog/114914.yaml @@ -0,0 +1,5 @@ +pr: 114914 +summary: Adding chunking settings to `IbmWatsonxService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index e960b0b777f2b..f4f4605c667c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -30,6 +31,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -86,11 +88,19 @@ public void parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + IbmWatsonxModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -112,6 +122,7 @@ private static IbmWatsonxModel createModel( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -123,6 +134,7 @@ private static 
IbmWatsonxModel createModel( NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -141,11 +153,17 @@ public IbmWatsonxModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -166,6 +184,7 @@ private static IbmWatsonxModel createModelFromPersistent( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secretSettings, String failureMessage ) { @@ -174,6 +193,7 @@ private static IbmWatsonxModel createModelFromPersistent( taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -185,11 +205,17 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -266,7 +292,8 @@ protected void doChunkedInfer( var batchedRequests = new EmbeddingRequestChunker( input.getInputs(), EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT + EmbeddingRequestChunker.EmbeddingType.FLOAT, + model.getConfigurations().getChunkingSettings() ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = ibmWatsonxModel.accept(getActionCreator(getSender(), getServiceComponents()), taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java index d60e31b5d41c0..6b20e07ecc0a2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java @@ -9,6 +9,7 @@ import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; @@ -40,6 +41,7 @@ public IbmWatsonxEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secrets, ConfigurationParseContext context ) { @@ -49,6 +51,7 
@@ public IbmWatsonxEmbeddingsModel( service, IbmWatsonxEmbeddingsServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, + chunkingSettings, DefaultSecretSettings.fromMap(secrets) ); } @@ -64,10 +67,11 @@ public IbmWatsonxEmbeddingsModel(IbmWatsonxEmbeddingsModel model, IbmWatsonxEmbe String service, IbmWatsonxEmbeddingsServiceSettings serviceSettings, TaskSettings taskSettings, + ChunkingSettings chunkingsettings, @Nullable DefaultSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingsettings), new ModelSecrets(secrets), serviceSettings ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index d6c491f2b7cec..f7f37c5bcd15f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -69,6 +70,8 @@ import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -124,6 +127,7 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); service.parseRequestConfig( @@ -150,6 +154,45 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + 
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createIbmWatsonxService()) { var failureListener = getModelListenerForException( @@ -235,6 +278,47 @@ public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsMode assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); } } @@ -399,6 +483,73 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists } } + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + 
getTaskSettingsMapEmpty(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotIbmWatsonxModel() throws IOException { var sender = mock(Sender.class); @@ -488,7 +639,15 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { } } - public void testChunkedInfer_Batches() throws IOException { + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { + testChunkedInfer_Batches(null); + } + + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { + testChunkedInfer_Batches(createRandomChunkingSettings()); + } + + private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws IOException { var input = List.of("foo", "bar"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -878,6 +1037,18 @@ private static ActionListener getModelListenerForException(Class excep }); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java
index 93fd7e402a0de..33fcd752fbf30 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java
@@ -82,6 +82,7 @@ public static IbmWatsonxEmbeddingsModel createModel(
                 null
             ),
             EmptyTaskSettings.INSTANCE,
+            null,
             new DefaultSecretSettings(new SecureString(apiKey.toCharArray()))
         );
     }

From 6d963d324aa71a514cd4baa01bd0805eaba7426e Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Thu, 21 Nov 2024 12:50:32 -0800
Subject: [PATCH 150/386] Limit thread queue during init in ExchangeSource (#117273)

ES|QL doesn't work well with 500 clusters or clusters with 500 nodes. The
reason is that, during the initialization of the exchange, we enqueue three
tasks per target (cluster or node) to the thread pool queue, which has a
limit of 1000; with 500 targets that is 1,500 tasks, enough to overflow the
queue. This PR reduces it to one task per target. I'm considering using
AsyncProcessor for these requests, but that will be a follow-up issue.

---
 .../exchange/ExchangeSourceHandler.java       | 23 ++++++++++---------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java
index e3fc0e26e34e0..4baaf9ad89bd6 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java
@@ -220,20 +220,21 @@ void onSinkComplete() {
      * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener)
      */
     public void addRemoteSink(RemoteSink remoteSink, int instances) {
-        for (int i = 0; i < instances; i++) {
-            var fetcher = new RemoteSinkFetcher(remoteSink);
-            fetchExecutor.execute(new AbstractRunnable() {
-                @Override
-                public void onFailure(Exception e) {
-                    fetcher.onSinkFailed(e);
-                }
+        fetchExecutor.execute(new AbstractRunnable() {
+            @Override
+            public void onFailure(Exception e) {
+                failure.unwrapAndCollect(e);
+                buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading
+            }

-                @Override
-                protected void doRun() {
+            @Override
+            protected void doRun() {
+                for (int i = 0; i < instances; i++) {
+                    var fetcher = new RemoteSinkFetcher(remoteSink);
                     fetcher.fetchPage();
                 }
-            });
-        }
+            }
+        });
     }

     /**

From 8e6e087ecc3ec883430b152622deab728f3f64bd Mon Sep 17 00:00:00 2001
From: Ioana Tagirta
Date: Thu, 21 Nov 2024 22:09:03 +0100
Subject: [PATCH 151/386] Remove StringQueryPredicate (#117134)

* Remove StringQueryPredicate

* Fix tests
---
 .../fulltext/StringQueryPredicate.java        | 62 -------------------
 .../core/planner/ExpressionTranslators.java   | 14 -----
 .../core/querydsl/query/QueryStringQuery.java | 16 ++---
 .../querydsl/query/QueryStringQueryTests.java | 20 +++---
 .../function/fulltext/FullTextWritables.java  |  9 +--
 .../physical/local/PushFiltersToSource.java   |  3 -
 .../planner/EsqlExpressionTranslators.java    |  3 +-
 .../StringQuerySerializationTests.java        | 34 ----------
 8 files changed, 15 insertions(+), 146 deletions(-)
 delete mode 100644
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java deleted file mode 100644 index 95000a5364e12..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static java.util.Collections.emptyList; - -public final class StringQueryPredicate extends FullTextPredicate { - - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "StringQueryPredicate", - StringQueryPredicate::new - ); - - private final Map fields; - - public StringQueryPredicate(Source source, String query, String options) { - super(source, query, options, emptyList()); - - // inferred - this.fields = FullTextUtils.parseFields(optionMap(), source); - } - - StringQueryPredicate(StreamInput in) throws IOException { - super(in); - assert super.children().isEmpty(); - this.fields = FullTextUtils.parseFields(optionMap(), source()); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, StringQueryPredicate::new, query(), options()); - } - - @Override - public Expression replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - public Map fields() { - return fields; - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index b6383fac33299..7836522c77130 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
@@ -26,7 +25,6 @@
 import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.Query;
-import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -73,18 +71,6 @@ private static Query translateField(RegexMatch e, String targetFieldName) {
         }
     }

-    public static class StringQueries extends ExpressionTranslator {
-
-        @Override
-        protected Query asQuery(StringQueryPredicate q, TranslatorHandler handler) {
-            return doTranslate(q, handler);
-        }
-
-        public static Query doTranslate(StringQueryPredicate q, TranslatorHandler handler) {
-            return new QueryStringQuery(q.source(), q.query(), q.fields(), q);
-        }
-    }
-
     public static class MultiMatches extends ExpressionTranslator {

         @Override
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
index 8ac90e6314174..8dcb87749ae48 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.QueryStringQueryBuilder;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
 import org.elasticsearch.xpack.esql.core.tree.Source;

 import java.util.Collections;
@@ -55,20 +54,13 @@ public class QueryStringQuery extends Query {

     private final String query;
     private final Map fields;
-    private StringQueryPredicate predicate;
     private final Map options;

-    // dedicated constructor for QueryTranslator
-    public QueryStringQuery(Source source, String query, String fieldName) {
-        this(source, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null);
-    }
-
-    public QueryStringQuery(Source source, String query, Map fields, StringQueryPredicate predicate) {
+    public QueryStringQuery(Source source, String query, Map fields, Map options) {
         super(source);
         this.query = query;
         this.fields = fields;
-        this.predicate = predicate;
-        this.options = predicate == null ? Collections.emptyMap() : predicate.optionMap();
+        this.options = options == null ? Collections.emptyMap() : options;
     }

 @@ -95,7 +87,7 @@ public String query() {

     @Override
     public int hashCode() {
-        return Objects.hash(query, fields, predicate);
+        return Objects.hash(query, fields);
     }

     @Override
@@ -109,7 +101,7 @@ public boolean equals(Object obj) {
         }

         QueryStringQuery other = (QueryStringQuery) obj;
-        return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(predicate, other.predicate);
+        return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(options, other.options);
     }

     @Override
diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java
index 0f80011961092..22e7b93e84ce1 100644
--- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java
+++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java
@@ -10,42 +10,40 @@
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.index.query.QueryStringQueryBuilder;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.util.StringUtils;

 import java.util.Collections;
+import java.util.Map;

 import static org.hamcrest.Matchers.equalTo;

 public class QueryStringQueryTests extends ESTestCase {

     public void testQueryBuilding() {
-        QueryStringQueryBuilder qb = getBuilder("lenient=true");
+        QueryStringQueryBuilder qb = getBuilder(Map.of("lenient", "true"));
         assertThat(qb.lenient(), equalTo(true));

-        qb = getBuilder("lenient=true;default_operator=AND");
+        qb = getBuilder(Map.of("lenient", "true", "default_operator", "AND"));
         assertThat(qb.lenient(), equalTo(true));
         assertThat(qb.defaultOperator(), equalTo(Operator.AND));

-        Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy"));
+        Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy")));
         assertThat(e.getMessage(), equalTo("illegal query_string option [pizza]"));

-        e = expectThrows(ElasticsearchParseException.class, () -> getBuilder("type=aoeu"));
+        e = expectThrows(ElasticsearchParseException.class, () -> getBuilder(Map.of("type", "aoeu")));
         assertThat(e.getMessage(), equalTo("failed to parse [multi_match] query type [aoeu].
unknown type.")); } - private static QueryStringQueryBuilder getBuilder(String options) { + private static QueryStringQueryBuilder getBuilder(Map options) { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", options); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); - return (QueryStringQueryBuilder) mmq.asBuilder(); + final QueryStringQuery query = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), options); + return (QueryStringQueryBuilder) query.asBuilder(); } public void testToString() { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", ""); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); + final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), Map.of()); assertEquals("QueryStringQuery@1:2[{foo=1.0}:eggplant]", mmq.toString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index 7fdfb4b328869..d59c736783172 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -10,19 +10,12 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import java.util.List; public class FullTextWritables { public static List getNamedWriteables() { - return List.of( - MatchQueryPredicate.ENTRY, - MultiMatchQueryPredicate.ENTRY, - StringQueryPredicate.ENTRY, - QueryString.ENTRY, - Match.ENTRY - ); + return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, QueryString.ENTRY, Match.ENTRY); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index f01e7c4b1f3a6..9f574ee8005b2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; @@ -253,8 +252,6 @@ static 
boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof StringQueryPredicate) { - return true; } else if (exp instanceof QueryString) { return true; } else if (exp instanceof Match mf) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 12dc77e6e7c59..6fac7bab2bd80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -86,7 +86,6 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.IsNotNulls(), new ExpressionTranslators.Nots(), new ExpressionTranslators.Likes(), - new ExpressionTranslators.StringQueries(), new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), @@ -536,7 +535,7 @@ protected Query asQuery(Match match, TranslatorHandler handler) { public static class QueryStringFunctionTranslator extends ExpressionTranslator { @Override protected Query asQuery(QueryString queryString, TranslatorHandler handler) { - return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), null); + return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), Map.of()); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java deleted file mode 100644 index ff00a161e1bb1..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */
-
-package org.elasticsearch.xpack.esql.expression.predicate.operator.fulltext;
-
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
-
-import java.io.IOException;
-
-public class StringQuerySerializationTests extends AbstractFulltextSerializationTests {
-
-    private static final String COMMA = ",";
-
-    @Override
-    protected final StringQueryPredicate createTestInstance() {
-        return new StringQueryPredicate(randomSource(), randomAlphaOfLength(randomIntBetween(1, 16)), randomOptionOrNull());
-    }
-
-    @Override
-    protected StringQueryPredicate mutateInstance(StringQueryPredicate instance) throws IOException {
-        var query = instance.query();
-        var options = instance.options();
-        if (randomBoolean()) {
-            query = randomValueOtherThan(query, () -> randomAlphaOfLength(randomIntBetween(1, 16)));
-        } else {
-            options = randomValueOtherThan(options, this::randomOptionOrNull);
-        }
-        return new StringQueryPredicate(instance.source(), query, options);
-    }
-}

From a9451df21833b2ea38954ae99fe18eadbd2366d8 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Thu, 21 Nov 2024 13:11:04 -0800
Subject: [PATCH 152/386] Fix SecureSM to allow innocuous threads and threadgroups for parallel streams (#117277)

When a parallel stream is opened, the JDK uses an internal fork-join pool
to process the stream. This pool is internal to the JDK, so it should
always be allowed to create threads. This commit modifies SecureSM to
account for this innocuous thread group and its threads.

---
 .../org/elasticsearch/secure_sm/SecureSM.java   | 18 +++++++++++++++---
 .../elasticsearch/secure_sm/SecureSMTests.java  | 11 +++++++++++
 2 files changed, 26 insertions(+), 3 deletions(-)

diff --git a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
index 4fd471c529e75..02d0491118dc7 100644
--- a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
+++ b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
@@ -157,7 +157,9 @@ private static void debugThreadGroups(final ThreadGroup caller, final ThreadGrou
     // Returns true if the given thread is an instance of the JDK's InnocuousThread.
     private static boolean isInnocuousThread(Thread t) {
         final Class c = t.getClass();
-        return c.getModule() == Object.class.getModule() && c.getName().equals("jdk.internal.misc.InnocuousThread");
+        return c.getModule() == Object.class.getModule()
+            && (c.getName().equals("jdk.internal.misc.InnocuousThread")
+                || c.getName().equals("java.util.concurrent.ForkJoinWorkerThread$InnocuousForkJoinWorkerThread"));
     }

     protected void checkThreadAccess(Thread t) {
@@ -184,11 +186,21 @@ protected void checkThreadAccess(Thread t) {
     private static final Permission MODIFY_THREADGROUP_PERMISSION = new RuntimePermission("modifyThreadGroup");
     private static final Permission MODIFY_ARBITRARY_THREADGROUP_PERMISSION = new ThreadPermission("modifyArbitraryThreadGroup");

+    // Returns true if the given thread group is the JDK's InnocuousForkJoinWorkerThreadGroup.
+    private static boolean isInnocuousThreadGroup(ThreadGroup t) {
+        final Class c = t.getClass();
+        return c.getModule() == Object.class.getModule() && t.getName().equals("InnocuousForkJoinWorkerThreadGroup");
+    }
+
    protected void checkThreadGroupAccess(ThreadGroup g) {
         Objects.requireNonNull(g);

+        boolean targetThreadGroupIsInnocuous = isInnocuousThreadGroup(g);
+
         // first, check if we can modify thread groups at all.
- checkPermission(MODIFY_THREADGROUP_PERMISSION); + if (targetThreadGroupIsInnocuous == false) { + checkPermission(MODIFY_THREADGROUP_PERMISSION); + } // check the threadgroup, if its our thread group or an ancestor, its fine. final ThreadGroup source = Thread.currentThread().getThreadGroup(); @@ -196,7 +208,7 @@ protected void checkThreadGroupAccess(ThreadGroup g) { if (source == null) { return; // we are a dead thread, do nothing - } else if (source.parentOf(target) == false) { + } else if (source.parentOf(target) == false && targetThreadGroupIsInnocuous == false) { checkPermission(MODIFY_ARBITRARY_THREADGROUP_PERMISSION); } } diff --git a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java index b94639414ffe5..69c6973f57cdf 100644 --- a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java @@ -14,7 +14,10 @@ import java.security.Permission; import java.security.Policy; import java.security.ProtectionDomain; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; /** Simple tests for SecureSM */ public class SecureSMTests extends TestCase { @@ -128,4 +131,12 @@ public void run() { t1.join(); assertTrue(interrupted1.get()); } + + public void testParallelStreamThreadGroup() throws Exception { + List list = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + list.add(i); + } + list.parallelStream().collect(Collectors.toSet()); + } } From ec644b10ec28b6eaaa5d658eedeac02dcb7bab5c Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 21 Nov 2024 16:17:22 -0500 Subject: [PATCH 153/386] Bump versions after 8.16.1 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 8935872fdec83..5be5990cfb203 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 2dbb7f5193af6..162a7e4995467 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -288,8 +288,8 @@ steps: env: BWC_VERSION: 8.15.4 - - label: "{{matrix.image}} / 8.16.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.1 + - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2 timeout_in_minutes: 300 matrix: setup: @@ -302,7 +302,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.16.1 + BWC_VERSION: 8.16.2 - label: "{{matrix.image}} / 8.17.0 / 
packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 047e4a3f4f8f6..aa1db893df8cc 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.1#bwcTest + - label: 8.16.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.2#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.1 + BWC_VERSION: 8.16.2 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index ac07e14c2a176..a8d6dda4fb0c2 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -15,7 +15,7 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.4" - - "8.16.1" + - "8.16.2" - "8.17.0" - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 351c605e6e092..5514fc376a285 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.16.1" + - "8.16.2" - "8.17.0" - "8.18.0" - "9.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 40071b19af5d3..7b65547a7d591 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -189,6 +189,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); + public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ba575cc642a81..6191922f13094 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8702003 8.15.4,8702003 8.16.0,8772001 +8.16.1,8772004 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index c54aea88613f5..f84d69af727ac 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8512000 8.15.4,8512000 8.16.0,8518000 +8.16.1,8518000 From 8fe8d22f7c63d574d7570abcf21757926468b415 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Nov 2024 
14:02:18 -0800 Subject: [PATCH 154/386] [DOCS] Remove broken migration guide link (#117293) --- docs/reference/cluster/update-settings.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index ca3d100e31e06..3d8bdcca07e2b 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -59,8 +59,8 @@ An example of a transient update: ==== We no longer recommend using transient cluster settings. Use persistent cluster settings instead. If a cluster becomes unstable, transient settings can clear -unexpectedly, resulting in a potentially undesired cluster configuration. See -the <>. +unexpectedly, resulting in a potentially undesired cluster configuration. +// See the <>. ==== // end::transient-settings-warning[] From 7768133f44421e13c40922dbf23ab47b8afdf46c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 22 Nov 2024 09:31:02 +1100 Subject: [PATCH 155/386] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} #117295 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d1e1976262f55..01cc6d0355a59 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -250,6 +250,9 @@ tests: - class: org.elasticsearch.discovery.ClusterDisruptionIT method: testAckedIndexing issue: https://github.com/elastic/elasticsearch/issues/117024 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} + issue: https://github.com/elastic/elasticsearch/issues/117295 # Examples: # From bead24880b9d6e8099c7c9a4043f5cee448ed7db Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 22 Nov 2024 09:39:10 +1100 Subject: [PATCH 156/386] Mute org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests testRetryPointInTime #117116 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 01cc6d0355a59..fae848c600aea 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -253,6 +253,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} issue: https://github.com/elastic/elasticsearch/issues/117295 +- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests + method: testRetryPointInTime + issue: https://github.com/elastic/elasticsearch/issues/117116 # Examples: # From de73397a05a982212b9f9bad0f5bea3c3bedabb0 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 21 Nov 2024 17:11:38 -0700 Subject: [PATCH 157/386] Add configurable timeout safe await method (#117296) Add a method for a configurable timeout with countdown latches. 
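
A usage sketch (hypothetical test snippet, not part of this change; it assumes
a `threadPool` is available in the test class, and `slowTask` is a placeholder
for whatever work the test waits on):

    CountDownLatch latch = new CountDownLatch(1);
    threadPool.generic().execute(() -> {
        slowTask.run();   // placeholder for the long-running work under test
        latch.countDown();
    });
    // fails the test if the latch has not reached zero within 30 seconds
    safeAwait(latch, TimeValue.timeValueSeconds(30));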
---
 .../main/java/org/elasticsearch/test/ESTestCase.java | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index d98b51adce615..5b2beaee00bfe 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -2331,10 +2331,18 @@ public static void safeAwait(CyclicBarrier barrier) {
      * flag and asserting that the latch is indeed completed before the timeout.
      */
     public static void safeAwait(CountDownLatch countDownLatch) {
+        safeAwait(countDownLatch, SAFE_AWAIT_TIMEOUT);
+    }
+
+    /**
+     * Await on the given {@link CountDownLatch} with a supplied timeout, preserving the thread's interrupt status
+     * flag and asserting that the latch is indeed completed before the timeout.
+     */
+    public static void safeAwait(CountDownLatch countDownLatch, TimeValue timeout) {
         try {
             assertTrue(
                 "safeAwait: CountDownLatch did not reach zero within the timeout",
-                countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS)
+                countDownLatch.await(timeout.millis(), TimeUnit.MILLISECONDS)
             );
         } catch (InterruptedException e) {
             Thread.currentThread().interrupt();

From eff0c42de91f0b8483c06035b10949632869b17d Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Fri, 22 Nov 2024 12:53:50 +1100
Subject: [PATCH 158/386] Fix and unmute OperatorPrivilegesIT (#117218)

This updates the constants for `OperatorPrivilegesIT` to include the
registered actions that were missing, and unmutes the test.

---
 muted-tests.yml                                               | 3 ---
 .../org/elasticsearch/xpack/security/operator/Constants.java | 4 ++++
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index fae848c600aea..b88bff86a0fbe 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -162,9 +162,6 @@ tests:
 - class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT
   method: testDeprecatedSettingsReturnWarnings
   issue: https://github.com/elastic/elasticsearch/issues/108628
-- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
-  method: testEveryActionIsEitherOperatorOnlyOrNonOperator
-  issue: https://github.com/elastic/elasticsearch/issues/102992
 - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests
   method: testBottomFieldSort
   issue: https://github.com/elastic/elasticsearch/issues/116249
diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
index df97c489cc6b7..bfff63442281d 100644
--- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
+++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java
@@ -358,6 +358,7 @@ public class Constants {
         "cluster:monitor/nodes/data_tier_usage",
         "cluster:monitor/nodes/features",
         "cluster:monitor/nodes/hot_threads",
+        "cluster:monitor/nodes/index_mode_stats",
         "cluster:monitor/nodes/info",
         "cluster:monitor/nodes/stats",
         "cluster:monitor/nodes/usage",
@@ -399,6 +400,7 @@ public class Constants {
         "cluster:monitor/xpack/info/frozen_indices",
         "cluster:monitor/xpack/info/graph",
         "cluster:monitor/xpack/info/ilm",
+
"cluster:monitor/xpack/info/logsdb", "cluster:monitor/xpack/info/logstash", "cluster:monitor/xpack/info/ml", "cluster:monitor/xpack/info/monitoring", @@ -463,6 +465,7 @@ public class Constants { "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", "cluster:monitor/xpack/usage/inference", + "cluster:monitor/xpack/usage/logsdb", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", @@ -488,6 +491,7 @@ public class Constants { "indices:admin/block/add[s]", "indices:admin/cache/clear", "indices:admin/data_stream/lazy_rollover", + "indices:admin/data_stream/reindex", "indices:internal/admin/ccr/restore/file_chunk/get", "indices:internal/admin/ccr/restore/session/clear", "indices:internal/admin/ccr/restore/session/put", From d87a1a2b5f9a16a1bda030980ef831462bf3a686 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 22 Nov 2024 13:33:06 +1100 Subject: [PATCH 159/386] Restore repo_name label to repository metrics (#117114) --- .../AzureBlobStoreRepositoryMetricsTests.java | 24 +++++++++++++------ .../azure/AzureBlobStoreRepositoryTests.java | 5 +++- .../s3/S3BlobStoreRepositoryTests.java | 5 +++- .../s3/S3RetryingInputStream.java | 2 ++ .../s3/S3BlobContainerRetriesTests.java | 2 +- .../repositories/RepositoriesMetrics.java | 11 ++++++++- 6 files changed, 38 insertions(+), 11 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index e049d4cd372e6..61940be247861 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -112,7 +112,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numThrottles + 1) .withThrottles(numThrottles) .withExceptions(numThrottles) @@ -137,7 +137,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics() .withRequests(1) .withThrottles(0) .withExceptions(1) @@ -170,7 +170,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numErrors + 1) .withThrottles(throttles.get()) .withExceptions(numErrors) @@ -191,7 +191,7 @@ public void testRequestFailuresAreCountedInMetrics() { assertThrows(IOException.class, () -> 
---
 .../AzureBlobStoreRepositoryMetricsTests.java | 24 +++++++++++++------
 .../azure/AzureBlobStoreRepositoryTests.java  |  5 +++-
 .../s3/S3BlobStoreRepositoryTests.java        |  5 +++-
 .../s3/S3RetryingInputStream.java             |  2 ++
 .../s3/S3BlobContainerRetriesTests.java       |  2 +-
 .../repositories/RepositoriesMetrics.java     | 11 ++++++++-
 6 files changed, 38 insertions(+), 11 deletions(-)

diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java
index e049d4cd372e6..61940be247861 100644
--- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java
+++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java
@@ -112,7 +112,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException {
         blobContainer.blobExists(purpose, blobName);

         // Correct metrics are recorded
-        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics()
+        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics()
             .withRequests(numThrottles + 1)
             .withThrottles(numThrottles)
             .withExceptions(numThrottles)
@@ -137,7 +137,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException {
         assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName));

         // Correct metrics are recorded
-        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics()
+        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics()
            .withRequests(1)
            .withThrottles(0)
            .withExceptions(1)
@@ -170,7 +170,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException {
         blobContainer.blobExists(purpose, blobName);

         // Correct metrics are recorded
-        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics()
+        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics()
            .withRequests(numErrors + 1)
            .withThrottles(throttles.get())
            .withExceptions(numErrors)
@@ -191,7 +191,7 @@ public void testRequestFailuresAreCountedInMetrics() {
         assertThrows(IOException.class, () -> blobContainer.listBlobs(purpose));

         // Correct metrics are recorded
-        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS).expectMetrics()
+        metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, repository).expectMetrics()
            .withRequests(4)
            .withThrottles(0)
            .withExceptions(4)
@@ -322,14 +322,20 @@ private void clearMetrics(String discoveryNode) {
             .forEach(TestTelemetryPlugin::resetMeter);
     }

-    private MetricsAsserter metricsAsserter(String dataNodeName, OperationPurpose operationPurpose, AzureBlobStore.Operation operation) {
-        return new MetricsAsserter(dataNodeName, operationPurpose, operation);
+    private MetricsAsserter metricsAsserter(
+        String dataNodeName,
+        OperationPurpose operationPurpose,
+        AzureBlobStore.Operation operation,
+        String repository
+    ) {
+        return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository);
     }

     private class MetricsAsserter {
         private final String dataNodeName;
         private final OperationPurpose purpose;
         private final AzureBlobStore.Operation operation;
+        private final String repository;

         enum Result {
             Success,
@@ -355,10 +361,11 @@ List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, Strin
             abstract List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name);
         }

-        private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation) {
+        private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) {
             this.dataNodeName = dataNodeName;
             this.purpose = purpose;
             this.operation = operation;
+            this.repository = repository;
         }

         private class Expectations {
@@ -451,6 +458,7 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa
                 .filter(
                     m -> m.attributes().get("operation").equals(operation.getKey())
                         && m.attributes().get("purpose").equals(purpose.getKey())
+                        && m.attributes().get("repo_name").equals(repository)
                         && m.attributes().get("repo_type").equals("azure")
                 )
                 .findFirst()
@@ -462,6 +470,8 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa
                             + operation.getKey()
                             + " and purpose="
                             + purpose.getKey()
+                            + " and repo_name="
+                            + repository
                             + " in "
                             + measurements
                     )
diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java
index ab3f3ee4f3728..bd21f208faac4 100644
--- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java
+++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java
@@ -402,7 +402,10 @@ public void testMetrics() throws Exception {
                 )
             );
             metrics.forEach(metric -> {
-                assertThat(metric.attributes(), allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("operation"), hasKey("purpose")));
+                assertThat(
+                    metric.attributes(),
+                    allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose"))
+                );
                 final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation"));
                 final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey(
                     operation,
diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
index d9480abf21687..bb8a452e21771 100644
--- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
+++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java
@@ -300,7 +300,10 @@ public void testMetrics() throws Exception {
                 )
             );
             metrics.forEach(metric -> {
-                assertThat(metric.attributes(), allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("operation"), hasKey("purpose")));
+                assertThat(
+                    metric.attributes(),
+                    allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose"))
+                );
                 final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation"));
                 final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey(
                     operation,
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java
index 7407522651e55..da357dc09ab95 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java
@@ -327,6 +327,8 @@ private Map metricAttributes(String action) {
         return Map.of(
             "repo_type",
             S3Repository.TYPE,
+            "repo_name",
+            blobStore.getRepositoryMetadata().name(),
             "operation",
             Operation.GET_OBJECT.getKey(),
             "purpose",
diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java
index ac49cffc1e0da..b292dc5872994 100644
--- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java
+++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java
@@ -1106,7 +1106,7 @@ private List getRetryHistogramMeasurements() {
     }

     private Map metricAttributes(String action) {
-        return Map.of("repo_type", "s3", "operation", "GetObject", "purpose", "Indices", "action", action);
+        return Map.of("repo_type", "s3", "repo_name", "repository", "operation", "GetObject", "purpose", "Indices", "action", action);
     }

     /**
diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java
index 3a210199065b7..2cd6e2b11ef7a 100644
--- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java
+++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java
@@ -127,7 +127,16 @@ public static Map createAttributesMap(
         OperationPurpose purpose,
         String operation
     ) {
-        return Map.of("repo_type", repositoryMetadata.type(), "operation", operation, "purpose", purpose.getKey());
+        return Map.of(
+            "repo_type",
+            repositoryMetadata.type(),
+            "repo_name",
+            repositoryMetadata.name(),
+            "operation",
+            operation,
+            "purpose",
+            purpose.getKey()
+        );
     }
 }

From 4793caa9af8b7eb90ad4b62e8d638be987c7279f Mon Sep 17 00:00:00 2001
From: Nick Tindall
Date: Fri, 22 Nov 2024 14:45:04 +1100
Subject: [PATCH 160/386] Expand LineLength exemption to include all comments with a URL (#117221)

---
 build-tools-internal/src/main/resources/checkstyle.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build-tools-internal/src/main/resources/checkstyle.xml b/build-tools-internal/src/main/resources/checkstyle.xml
index daedc2ac3c629..9ed31d993909e 100644
--- a/build-tools-internal/src/main/resources/checkstyle.xml
+++ b/build-tools-internal/src/main/resources/checkstyle.xml
@@ -57,7 +57,7 @@
    unfair. -->
-
+

From 8cfe8f1c5ccbe00422609c36819a58115caad922 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Fri, 22 Nov 2024 07:13:44 +0100
Subject: [PATCH 161/386] MultiBucketsAggregation.Bucket does not implement ToXContent anymore (#117240)

This change makes some bucket implementations leaner.
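A hedged sketch of the resulting shape, condensed from the changes below
(the real classes differ per aggregation): buckets stop implementing
ToXContent and stop carrying transient render-state such as `keyed`; the
owning aggregation renders each bucket and passes that state in:

    // the bucket no longer implements ToXContent; 'keyed' arrives as an argument
    private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException {
        if (keyed) {
            builder.startObject(format.format(key).toString());
        } else {
            builder.startObject();
        }
        builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
        aggregations.toXContentInternal(builder, params);
        builder.endObject();
    }

    // the enclosing aggregation drives rendering and supplies 'keyed'
    for (Bucket bucket : buckets) {
        bucket.bucketToXContent(builder, params, keyed);
    }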
---
 .../adjacency/InternalAdjacencyMatrix.java    |  6 +--
 .../histogram/InternalAutoDateHistogram.java  |  6 +--
 .../bucket/timeseries/InternalTimeSeries.java | 22 ++++-------
 .../timeseries/TimeSeriesAggregator.java      |  3 +-
 .../timeseries/InternalTimeSeriesTests.java   | 20 +++++-----
 .../timeseries/TimeSeriesAggregatorTests.java |  6 +--
 .../bucket/MultiBucketsAggregation.java       |  3 +-
 .../bucket/composite/InternalComposite.java   |  8 ----
 .../bucket/filter/FiltersAggregator.java      | 14 ++-----
 .../bucket/filter/InternalFilters.java        | 30 +++++---------
 .../bucket/geogrid/InternalGeoGrid.java       |  2 +-
 .../bucket/geogrid/InternalGeoGridBucket.java |  5 +--
 .../AbstractHistogramAggregator.java          |  2 +-
 .../histogram/DateHistogramAggregator.java    |  3 +-
 .../DateRangeHistogramAggregator.java         |  1 -
 .../histogram/InternalDateHistogram.java      | 27 +++++--------
 .../bucket/histogram/InternalHistogram.java   | 27 +++++--------
 .../InternalVariableWidthHistogram.java       |  6 +--
 .../bucket/prefix/InternalIpPrefix.java       | 38 +++---------------
 .../bucket/prefix/IpPrefixAggregator.java     |  2 -
 .../bucket/range/BinaryRangeAggregator.java   |  4 +-
 .../bucket/range/InternalBinaryRange.java     | 29 ++++----------
 .../bucket/range/InternalDateRange.java       | 18 ++-------
 .../bucket/range/InternalGeoDistance.java     | 10 ++---
 .../bucket/range/InternalRange.java           | 39 ++++---------------
 .../bucket/range/RangeAggregator.java         | 15 ++-----
 .../bucket/terms/AbstractInternalTerms.java   |  5 ++-
 .../terms/InternalMappedSignificantTerms.java |  2 +-
 .../bucket/terms/InternalMappedTerms.java     |  2 +-
 .../bucket/terms/InternalRareTerms.java       |  6 +--
 .../terms/InternalSignificantTerms.java       |  4 +-
 .../bucket/terms/InternalTerms.java           |  3 +-
 .../bucket/terms/UnmappedTerms.java           |  2 +-
 .../search/SearchResponseMergerTests.java     | 11 +----
 .../InternalAggregationsTests.java            | 32 ++++++---------
 .../bucket/filter/InternalFiltersTests.java   |  7 ++--
 .../histogram/InternalDateHistogramTests.java | 10 +---
 .../histogram/InternalHistogramTests.java     | 12 ++----
 .../bucket/prefix/InternalIpPrefixTests.java  | 16 +-------
 .../range/InternalBinaryRangeTests.java       |  3 +-
 .../bucket/range/InternalDateRangeTests.java  |  6 +--
 .../range/InternalGeoDistanceTests.java       |  6 +--
 .../bucket/range/InternalRangeTests.java      |  6 +--
 .../pipeline/BucketHelpersTests.java          | 10 -----
 .../multiterms/InternalMultiTerms.java        |  7 ++--
 .../InternalCategorizationAggregation.java    |  6 +--
 .../aggs/changepoint/ChangePointBucket.java   |  3 +-
 .../aggregation/AggregationTestUtils.java     |  2 +-
 .../rollup/RollupResponseTranslator.java      | 10 +---
 .../xpack/sql/execution/search/Querier.java   |  6 ---
 .../search/extractor/TestBucket.java          |  7 ----
 .../pivot/AggregationResultUtilsTests.java    | 16 ++++----
 52 files changed, 157 insertions(+), 389 deletions(-)

diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
index 6f36f1f17bf8b..824f009bc7d8e 100644
--- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
+++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
@@ -81,14 +81,12 @@ public InternalAggregations getAggregations() {
             return aggregations;
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             builder.field(CommonFields.KEY.getPreferredName(), key);
             builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         @Override
@@ -237,7 +235,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.startArray(CommonFields.BUCKETS.getPreferredName());
         for (InternalBucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params);
         }
         builder.endArray();
         return builder;
diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index 42aa79f990fc6..edb7ec4cffce7 100644
--- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -99,8 +99,7 @@ public Object getKey() {
             return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params, DocValueFormat format) throws IOException {
             String keyAsString = format.format(key).toString();
             builder.startObject();
             if (format != DocValueFormat.RAW) {
@@ -110,7 +109,6 @@
             builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         @Override
@@ -597,7 +595,7 @@ private BucketReduceResult mergeConsecutiveBuckets(
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.startArray(CommonFields.BUCKETS.getPreferredName());
         for (Bucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, format);
         }
         builder.endArray();
         builder.field("interval", getInterval().toString());
diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java
index c4cdacd135cb4..d7590f2126325 100644
--- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java
+++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java
@@ -36,24 +36,21 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation
         buckets = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
-            buckets.add(new InternalTimeSeries.InternalBucket(in, keyed));
+            buckets.add(new InternalTimeSeries.InternalBucket(in));
         }
         this.buckets = buckets;
         this.bucketMap = null;
@@ -162,7 +156,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (InternalBucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
@@ -252,14 +246,14 @@ public InternalTimeSeries create(List buckets) {

     @Override
     public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) {
-        return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed);
+        return new InternalBucket(prototype.key, prototype.docCount, aggregations);
     }

     private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) {
         InternalTimeSeries.InternalBucket reduced = null;
         for (InternalTimeSeries.InternalBucket bucket : buckets) {
             if (reduced == null) {
-                reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed);
+                reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations);
             } else {
                 reduced.docCount += bucket.docCount;
             }
diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java
index 369ae4590fe97..63472bca1d9ac 100644
--- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java
+++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java
@@ -83,8 +83,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
                 InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket(
                     BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here.
                     docCount,
-                    null,
-                    keyed
+                    null
                 );
                 bucket.bucketOrd = ordsEnum.ord();
                 buckets.add(bucket);
diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java
index e61c02e0b9cd2..3b67d09c0d6a1 100644
--- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java
+++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java
@@ -49,7 +49,7 @@ private List randomBuckets(boolean keyed, InternalAggregations a
             }
             try {
                 var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef();
-                bucketList.add(new InternalBucket(key, docCount, aggregations, keyed));
+                bucketList.add(new InternalBucket(key, docCount, aggregations));
             } catch (IOException e) {
                 throw new UncheckedIOException(e);
             }
@@ -108,10 +108,10 @@ public void testReduceSimple() {
         InternalTimeSeries first = new InternalTimeSeries(
             "ts",
             List.of(
-                new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY, false)
+                new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY)
             ),
             false,
             Map.of()
@@ -119,8 +119,8 @@ public void testReduceSimple() {
         InternalTimeSeries second = new InternalTimeSeries(
             "ts",
             List.of(
-                new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY, false)
+                new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY)
             ),
             false,
             Map.of()
@@ -128,9 +128,9 @@ public void testReduceSimple() {
         InternalTimeSeries third = new InternalTimeSeries(
             "ts",
             List.of(
-                new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY, false),
-                new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY, false)
+                new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY),
+                new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY)
             ),
             false,
             Map.of()
diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java
index d9a4023457126..493b4bdc81860 100644
--- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java
+++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java
@@ -176,19 +176,19 @@ public void testMultiBucketAggregationAsSubAggregation() throws IOException {
             InternalDateHistogram byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=xxx}").getAggregations().get("by_timestamp");
             assertThat(
                 byTimeStampBucket.getBuckets(),
-                contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY))
+                contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY))
             );
             assertThat(ts.getBucketByKey("{dim1=aaa, dim2=yyy}").docCount, equalTo(2L));
             byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=yyy}").getAggregations().get("by_timestamp");
             assertThat(
                 byTimeStampBucket.getBuckets(),
-                contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY))
+                contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY))
             );
             assertThat(ts.getBucketByKey("{dim1=bbb, dim2=zzz}").docCount, equalTo(4L));
             byTimeStampBucket = ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("by_timestamp");
             assertThat(
                 byTimeStampBucket.getBuckets(),
-                contains(new InternalDateHistogram.Bucket(startTime, 4, false, null, InternalAggregations.EMPTY))
+                contains(new InternalDateHistogram.Bucket(startTime, 4, null, InternalAggregations.EMPTY))
             );
         };
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
index 87ebec525a6fa..d39e90b44579e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.HasAggregations;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.xcontent.ToXContent;

 import java.util.List;

@@ -24,7 +23,7 @@ public interface MultiBucketsAggregation extends Aggregation {
      * A bucket represents a criteria to which all documents that fall in it adhere to. It is also uniquely identified
      * by a key, and can potentially hold sub-aggregations computed over all documents in it.
      */
-    interface Bucket extends HasAggregations, ToXContent {
+    interface Bucket extends HasAggregations {
         /**
          * @return The key associated with the bucket
         */
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
index 30c45ba46d9b7..8b3253418bc23 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java
@@ -465,14 +465,6 @@ public int compareKey(InternalBucket other) {
             return 0;
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            /**
-             * See {@link CompositeAggregation#bucketToXContent}
-             */
-            throw new UnsupportedOperationException("not implemented");
-        }
-
         InternalBucket finalizeSampling(SamplingContext samplingContext) {
             return new InternalBucket(
                 sourceNames,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
index 69eff3630a8f4..a9ec0ba878ec0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java
@@ -215,15 +215,9 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
             filters.size() + (otherBucketKey == null ? 0 : 1),
             (offsetInOwningOrd, docCount, subAggregationResults) -> {
                 if (offsetInOwningOrd < filters.size()) {
-                    return new InternalFilters.InternalBucket(
-                        filters.get(offsetInOwningOrd).key(),
-                        docCount,
-                        subAggregationResults,
-                        keyed,
-                        keyedBucket
-                    );
+                    return new InternalFilters.InternalBucket(filters.get(offsetInOwningOrd).key(), docCount, subAggregationResults);
                 }
-                return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults, keyed, keyedBucket);
+                return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults);
             },
             buckets -> new InternalFilters(name, buckets, keyed, keyedBucket, metadata())
         );
@@ -234,12 +228,12 @@ public InternalAggregation buildEmptyAggregation() {
         InternalAggregations subAggs = buildEmptySubAggregations();
         List buckets = new ArrayList<>(filters.size() + (otherBucketKey == null ? 0 : 1));
         for (QueryToFilterAdapter filter : filters) {
-            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs, keyed, keyedBucket);
+            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs);
             buckets.add(bucket);
         }

         if (otherBucketKey != null) {
-            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed, keyedBucket);
+            InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs);
             buckets.add(bucket);
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java
index a5dfb0d8efafa..c05759582346a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java
@@ -32,26 +32,20 @@
 public class InternalFilters extends InternalMultiBucketAggregation implements Filters {
     public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Filters.Bucket {

-        private final boolean keyed;
-        private final boolean keyedBucket;
         private final String key;
         private long docCount;
         InternalAggregations aggregations;

-        public InternalBucket(String key, long docCount, InternalAggregations aggregations, boolean keyed, boolean keyedBucket) {
+        public InternalBucket(String key, long docCount, InternalAggregations aggregations) {
             this.key = key;
-            this.keyedBucket = keyedBucket;
             this.docCount = docCount;
             this.aggregations = aggregations;
-            this.keyed = keyed;
         }

         /**
          * Read from a stream.
          */
-        public InternalBucket(StreamInput in, boolean keyed, boolean keyedBucket) throws IOException {
-            this.keyed = keyed;
-            this.keyedBucket = keyedBucket;
+        public InternalBucket(StreamInput in) throws IOException {
             key = in.readOptionalString();
             docCount = in.readVLong();
             aggregations = InternalAggregations.readFrom(in);
@@ -84,8 +78,7 @@ public InternalAggregations getAggregations() {
             return aggregations;
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed, boolean keyedBucket) throws IOException {
             if (keyed && keyedBucket) {
                 builder.startObject(key);
             } else {
@@ -97,7 +90,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         @Override
@@ -110,24 +102,20 @@ public boolean equals(Object other) {
             }
             InternalBucket that = (InternalBucket) other;
             return Objects.equals(key, that.key)
-                && Objects.equals(keyed, that.keyed)
-                && Objects.equals(keyedBucket, that.keyedBucket)
                 && Objects.equals(docCount, that.docCount)
                 && Objects.equals(aggregations, that.aggregations);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(getClass(), key, keyed, keyedBucket, docCount, aggregations);
+            return Objects.hash(getClass(), key, docCount, aggregations);
         }

         InternalBucket finalizeSampling(SamplingContext samplingContext) {
             return new InternalBucket(
                 key,
                 samplingContext.scaleUp(docCount),
-                InternalAggregations.finalizeSampling(aggregations, samplingContext),
-                keyed,
-                keyedBucket
+                InternalAggregations.finalizeSampling(aggregations, samplingContext)
             );
         }
     }
@@ -155,7 +143,7 @@ public InternalFilters(StreamInput in) throws IOException {
         int size = in.readVInt();
         List buckets = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
-            buckets.add(new InternalBucket(in, keyed, keyedBucket));
+            buckets.add(new InternalBucket(in));
         }
         this.buckets = buckets;
         this.bucketMap = null;
@@ -182,7 +170,7 @@ public InternalFilters create(List buckets) {

     @Override
     public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) {
-        return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed, keyedBucket);
+        return new InternalBucket(prototype.key, prototype.docCount, aggregations);
     }

     @Override
@@ -211,7 +199,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont
         ) {
             @Override
             protected InternalBucket createBucket(InternalBucket proto, long docCount, InternalAggregations aggregations) {
-                return new InternalBucket(proto.key, docCount, aggregations, proto.keyed, proto.keyedBucket);
+                return new InternalBucket(proto.key, docCount, aggregations);
             }
         };

@@ -252,7 +240,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (InternalBucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, keyed, keyedBucket);
         }
         if (keyed && keyedBucket) {
             builder.endObject();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
index d56625ab28c51..6a32b41034503 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
@@ -152,7 +152,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
     public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         builder.startArray(CommonFields.BUCKETS.getPreferredName());
         for (InternalGeoGridBucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params);
         }
         builder.endArray();
         return builder;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
index c972845468c2b..9e3c96da2e70b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
+import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;

 import java.io.IOException;
@@ -76,14 +77,12 @@ public int compareTo(InternalGeoGridBucket other) {
         return 0;
     }

-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+    final void bucketToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
         builder.startObject();
         builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString());
         builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount);
         aggregations.toXContentInternal(builder, params);
         builder.endObject();
-        return builder;
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
index ed687df6377dd..5ea8cd035e580 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
@@ -84,7 +84,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
         return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> {
             double roundKey = Double.longBitsToDouble(bucketValue);
             double key = roundKey * interval + offset;
-            return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults);
+            return new InternalHistogram.Bucket(key, docCount, formatter, subAggregationResults);
         }, (owningBucketOrd, buckets) -> {
             // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
             CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
index cc2db63fa5ec5..1eb0226ad8c8c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
@@ -340,7 +340,7 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu
     @Override
     public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> {
-            return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults);
+            return new InternalDateHistogram.Bucket(bucketValue, docCount, formatter, subAggregationResults);
         }, (owningBucketOrd, buckets) -> {
             // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
             CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
@@ -466,7 +466,6 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) {
                 new InternalDateHistogram.Bucket(
                     rangeBucket.getFrom().toInstant().toEpochMilli(),
                     rangeBucket.getDocCount(),
-                    keyed,
                     format,
                     rangeBucket.getAggregations()
                 )
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
index f385f7c34f6b7..5a104055d9aec 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java
@@ -171,7 +171,6 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
             (bucketValue, docCount, subAggregationResults) -> new InternalDateHistogram.Bucket(
                 bucketValue,
                 docCount,
-                keyed,
                 formatter,
                 subAggregationResults
             ),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
index 564abff2a9f97..d2badbeec4622 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
@@ -53,19 +53,17 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
     public static class Bucket extends AbstractHistogramBucket implements KeyComparable {

         final long key;
-        private final transient boolean keyed;

-        public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) {
+        public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) {
             super(docCount, aggregations, format);
-            this.keyed = keyed;
             this.key = key;
         }

         /**
          * Read from a stream.
          */
-        public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException {
-            return new Bucket(in.readLong(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in));
+        public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException {
+            return new Bucket(in.readLong(), in.readVLong(), format, InternalAggregations.readFrom(in));
         }

         @Override
@@ -101,8 +99,7 @@ public Object getKey() {
             return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException {
             String keyAsString = format.format(key).toString();
             if (keyed) {
                 builder.startObject(keyAsString);
@@ -116,7 +113,6 @@
             builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         @Override
@@ -124,15 +120,10 @@ public int compareKey(Bucket other) {
             return Long.compare(key, other.key);
         }

-        public boolean getKeyed() {
-            return keyed;
-        }
-
         Bucket finalizeSampling(SamplingContext samplingContext) {
             return new Bucket(
                 key,
                 samplingContext.scaleUp(docCount),
-                keyed,
                 format,
                 InternalAggregations.finalizeSampling(aggregations, samplingContext)
             );
@@ -237,7 +228,7 @@ public InternalDateHistogram(StreamInput in) throws IOException {
         } else {
             downsampledResultsOffset = false;
         }
-        buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format));
+        buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format));
         // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort
         if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) {
             // list is mutable by #readCollectionAsList contract
@@ -301,7 +292,7 @@ public InternalDateHistogram create(List buckets) {

     @Override
     public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-        return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations);
+        return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations);
     }

     private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) {
@@ -398,7 +389,7 @@ public void accept(long key) {
                     reduceContext.consumeBucketsAndMaybeBreak(size);
                     size = 0;
                 }
-                iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs));
+                iter.add(new InternalDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs));
             }
         });
     }
@@ -546,7 +537,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (Bucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
@@ -603,7 +594,7 @@ public InternalAggregation createAggregation(List
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java
 {
         final double key;
-        private final transient boolean keyed;

-        public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) {
+        public Bucket(double key, long docCount, DocValueFormat format, InternalAggregations aggregations) {
             super(docCount, aggregations, format);
-            this.keyed = keyed;
             this.key = key;
         }

         /**
          * Read from a stream.
          */
-        public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException {
-            return new Bucket(in.readDouble(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in));
+        public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException {
+            return new Bucket(in.readDouble(), in.readVLong(), format, InternalAggregations.readFrom(in));
         }

         @Override
@@ -96,8 +94,7 @@ public Object getKey() {
             return key;
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException {
             String keyAsString = format.format(key).toString();
             if (keyed) {
                 builder.startObject(keyAsString);
@@ -111,7 +108,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         @Override
@@ -119,15 +115,10 @@ public int compareKey(Bucket other) {
             return Double.compare(key, other.key);
         }

-        public boolean getKeyed() {
-            return keyed;
-        }
-
         Bucket finalizeSampling(SamplingContext samplingContext) {
             return new Bucket(
                 key,
                 samplingContext.scaleUp(docCount),
-                keyed,
                 format,
                 InternalAggregations.finalizeSampling(aggregations, samplingContext)
             );
@@ -220,7 +211,7 @@ public InternalHistogram(StreamInput in) throws IOException {
         }
         format = in.readNamedWriteable(DocValueFormat.class);
         keyed = in.readBoolean();
-        buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format));
+        buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format));
         // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort
         if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) {
             // list is mutable by #readCollectionAsList contract
@@ -265,7 +256,7 @@ public InternalHistogram create(List buckets) {

     @Override
     public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-        return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations);
+        return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations);
     }

     private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) {
@@ -373,7 +364,7 @@ public void accept(double key) {
                     reduceContext.consumeBucketsAndMaybeBreak(size);
                     size = 0;
                 }
-                iter.add(new Bucket(key, 0, keyed, format, reducedEmptySubAggs));
+                iter.add(new Bucket(key, 0, format, reducedEmptySubAggs));
             }
         });
     }
@@ -478,7 +469,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (Bucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
@@ -508,7 +499,7 @@ public InternalAggregation createAggregation(List
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java
 {
-    private final transient DocValueFormat format;
     private final BytesRef key;
-    private final boolean keyed;
     private final boolean isIpv6;
     private final int prefixLength;
     private final boolean appendPrefixLength;
@@ -48,18 +46,14 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket
         private final InternalAggregations aggregations;

         public Bucket(
-            DocValueFormat format,
             BytesRef key,
-            boolean keyed,
             boolean isIpv6,
             int prefixLength,
             boolean appendPrefixLength,
             long docCount,
             InternalAggregations aggregations
         ) {
-            this.format = format;
             this.key = key;
-            this.keyed = keyed;
             this.isIpv6 = isIpv6;
             this.prefixLength = prefixLength;
             this.appendPrefixLength = appendPrefixLength;
@@ -70,9 +64,7 @@ public Bucket(
         /**
          * Read from a stream.
         */
-        public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOExc
-            this.format = format;
-            this.keyed = keyed;
+        public Bucket(StreamInput in) throws IOException {
             this.key = in.readBytesRef();
             this.isIpv6 = in.readBoolean();
             this.prefixLength = in.readVInt();
@@ -81,8 +73,7 @@
             this.aggregations = InternalAggregations.readFrom(in);
         }

-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException {
             String key = DocValueFormat.IP.format(this.key);
             if (appendPrefixLength) {
                 key = key + "/" + prefixLength;
@@ -101,7 +92,6 @@
             builder.field(IpPrefixAggregationBuilder.PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength);
             aggregations.toXContentInternal(builder, params);
             builder.endObject();
-            return builder;
         }

         private static BytesRef netmask(int prefixLength) {
@@ -118,10 +108,6 @@ public void writeTo(StreamOutput out) throws IOException {
             aggregations.writeTo(out);
         }

-        public DocValueFormat getFormat() {
-            return format;
-        }
-
         public BytesRef getKey() {
             return key;
         }
@@ -162,14 +148,13 @@ public boolean equals(Object o) {
                 && prefixLength == bucket.prefixLength
                 && appendPrefixLength == bucket.appendPrefixLength
                 && docCount == bucket.docCount
-                && Objects.equals(format, bucket.format)
                 && Objects.equals(key, bucket.key)
                 && Objects.equals(aggregations, bucket.aggregations);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(format, key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations);
+            return Objects.hash(key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations);
         }

         @Override
@@ -206,7 +191,7 @@ public InternalIpPrefix(StreamInput in) throws IOException {
         format = in.readNamedWriteable(DocValueFormat.class);
         keyed = in.readBoolean();
         minDocCount = in.readVLong();
-        buckets = in.readCollectionAsList(stream -> new Bucket(stream, format, keyed));
+        buckets = in.readCollectionAsList(Bucket::new);
     }

     @Override
@@ -298,7 +283,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (InternalIpPrefix.Bucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
@@ -316,9 +301,7 @@ public InternalIpPrefix create(List buckets) {

     @Override
     public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
         return new Bucket(
-            format,
             prototype.key,
-            prototype.keyed,
             prototype.isIpv6,
             prototype.prefixLength,
             prototype.appendPrefixLength,
@@ -328,16 +311,7 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
     }

     private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) {
-        return new Bucket(
-            format,
-            prototype.key,
-            prototype.keyed,
-            prototype.isIpv6,
-            prototype.prefixLength,
-            prototype.appendPrefixLength,
-            docCount,
-            aggregations
-        );
+        return new Bucket(prototype.key, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, docCount, aggregations);
     }

     private Bucket reduceBucket(List buckets, AggregationReduceContext context) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
index e3192e9b2fa16..38d26bfa9ae28 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java
@@ -200,9 +200,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
                 checkRealMemoryCBForInternalBucket();
                 buckets.add(
                     new InternalIpPrefix.Bucket(
-                        config.format(),
                         BytesRef.deepCopyOf(ipAddress),
-                        keyed,
                         ipPrefix.isIpv6,
                         ipPrefix.prefixLength,
                         ipPrefix.appendPrefixLength,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
index 9bde8d007c1b7..c10bb3543549e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
@@ -366,7 +366,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
             ranges.length,
             (offsetInOwningOrd, docCount, subAggregationResults) -> {
                 Range range = ranges[offsetInOwningOrd];
-                return new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, docCount, subAggregationResults);
+                return new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, docCount, subAggregationResults);
             },
             buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata())
         );
@@ -378,7 +378,7 @@ public InternalAggregation buildEmptyAggregation() {
         InternalAggregations subAggs = buildEmptySubAggregations();
         List buckets = new ArrayList<>(ranges.length);
         for (Range range : ranges) {
-            InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, 0, subAggs);
+            InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, 0, subAggs);
             buckets.add(bucket);
         }
         return new InternalBinaryRange(name, format, keyed, buckets, metadata());
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
index 100bab7443a51..9571dfebc6069 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
@@ -39,23 +39,13 @@ public final class InternalBinaryRange extends InternalMultiBucketAggregation
-        buckets = in.readCollectionAsList(stream -> Bucket.createFromStream(stream, format, keyed));
+        buckets = in.readCollectionAsList(stream -> Bucket.createFromStream(stream, format));
     }

     @Override
@@ -235,7 +222,7 @@ public InternalBinaryRange create(List buckets) {

     @Override
     public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-        return new Bucket(format, keyed, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations);
+        return new Bucket(format, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations);
     }

     @Override
@@ -251,7 +238,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont
             @Override
             protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) {
-                return new Bucket(proto.format, proto.keyed, proto.key, proto.from, proto.to, docCount, aggregations);
+                return new Bucket(proto.format, proto.key, proto.from, proto.to, docCount, aggregations);
             }
         };

@@ -299,7 +286,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (Bucket range : buckets) {
-            range.toXContent(builder, params);
+            range.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
index 7b2858806c325..7291a099dd7f7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
@@ -34,19 +34,11 @@ public Bucket(
             boolean keyed,
             DocValueFormat formatter
         ) {
-            super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter);
+            super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter);
         }

-        public Bucket(
-            String key,
-            double from,
-            double to,
-            long docCount,
-            InternalAggregations aggregations,
-            boolean keyed,
-            DocValueFormat formatter
-        ) {
-            super(key, from, to, docCount, aggregations, keyed, formatter);
+        public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat formatter) {
+            super(key, from, to, docCount, aggregations, formatter);
         }

         @Override
@@ -99,10 +91,9 @@ public Bucket createBucket(
             double to,
             long docCount,
             InternalAggregations aggregations,
-            boolean keyed,
             DocValueFormat formatter
         ) {
-            return new Bucket(key, from, to, docCount, aggregations, keyed, formatter);
+            return new Bucket(key, from, to, docCount, aggregations, formatter);
         }

         @Override
@@ -113,7 +104,6 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
                 prototype.internalGetTo(),
                 prototype.getDocCount(),
                 aggregations,
-                prototype.getKeyed(),
                 prototype.getFormat()
             );
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java
index d1c3761d45e82..9a33df4702c1c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java
@@ -23,8 +23,8 @@ public class InternalGeoDistance extends InternalRange
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java
 ranges, DocValueFormat format, boolean keye
 }

     @SuppressWarnings("unchecked")
-    public B createBucket(
-        String key,
-        double from,
-        double to,
-        long docCount,
-        InternalAggregations aggregations,
-        boolean keyed,
-        DocValueFormat format
-    ) {
-        return (B) new Bucket(key, from, to, docCount, aggregations, keyed, format);
+    public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat format) {
+        return (B) new Bucket(key, from, to, docCount, aggregations, format);
     }

     @SuppressWarnings("unchecked")
@@ -232,7 +209,6 @@ public B createBucket(InternalAggregations aggregations, B prototype) {
             prototype.to,
             prototype.getDocCount(),
             aggregations,
-            prototype.keyed,
             prototype.format
         );
     }
@@ -285,7 +261,7 @@ public InternalRange(StreamInput in) throws IOException {
             }
             long docCount = in.readVLong();
             InternalAggregations aggregations = InternalAggregations.readFrom(in);
-            ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, keyed, format));
+            ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, format));
         }
         this.ranges = ranges;
     }
@@ -335,7 +311,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont
             @Override
             protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) {
-                return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.keyed, proto.format);
+                return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.format);
             }
         };

@@ -371,7 +347,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
                     b.to,
                     samplingContext.scaleUp(b.getDocCount()),
                     InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext),
-                    b.keyed,
                     b.format
                 )
             )
@@ -390,7 +365,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
             builder.startArray(CommonFields.BUCKETS.getPreferredName());
         }
         for (B range : ranges) {
-            range.toXContent(builder, params);
+            range.bucketToXContent(builder, params, keyed);
         }
         if (keyed) {
             builder.endObject();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
index 0654a788a10a9..a4574e8081868 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
@@ -538,15 +538,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw
             ranges.length,
             (offsetInOwningOrd, docCount, subAggregationResults) -> {
                 Range range = ranges[offsetInOwningOrd];
-                return rangeFactory.createBucket(
-                    range.key,
-                    range.originalFrom,
-                    range.originalTo,
-                    docCount,
-                    subAggregationResults,
-                    keyed,
-                    format
-                );
+                return rangeFactory.createBucket(range.key, range.originalFrom, range.originalTo, docCount, subAggregationResults, format);
             },
             buckets -> rangeFactory.create(name, buckets, format, keyed, metadata())
         );
@@ -564,7 +556,6 @@
                 range.originalTo,
                 0,
                 subAggs,
-                keyed,
                 format
             );
             buckets.add(bucket);
@@ -614,7 +605,7 @@ public InternalAggregation buildEmptyAggregation() {
         InternalAggregations subAggs = buildEmptySubAggregations();
         List buckets = new ArrayList<>(ranges.length);
         for (RangeAggregator.Range range : ranges) {
-            buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, keyed, format));
+            buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, format));
         }
         return factory.create(name, buckets, format, keyed, metadata());
     }
@@ -886,7 +877,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) {
                 Range r = ranges[i];
                 InternalFilters.InternalBucket b = filters.getBuckets().get(i);
                 buckets.add(
-                    rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), keyed, format)
+                    rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), format)
                 );
             }
             return rangeFactory.create(name(), buckets, format, keyed, filters.getMetadata());
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
index 5c422a9dd4e32..6388eb3baaa84 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java
@@ -66,6 +66,8 @@ public abstract static class AbstractTermsBucket
 > buckets
@@ -369,7 +372,7 @@ protected static XContentBuilder doXContentCommon(
         builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount);
         builder.startArray(CommonFields.BUCKETS.getPreferredName());
         for (AbstractTermsBucket bucket : buckets) {
-            bucket.toXContent(builder, params);
+            bucket.bucketToXContent(builder, params, showDocCountError);
         }
         builder.endArray();
         return builder;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java
index f179b7d05f9a4..3f75a27306ab4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java
@@ -134,7 +134,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
         // There is a condition (presumably when only one shard has a bucket?)
where reduce is not called // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer if (bucket.subsetDf >= minDocCount) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index 563321f56cb5f..5b9403840dfff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -145,6 +145,6 @@ public int hashCode() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index 6540cd2ee38da..64cebee880141 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; @@ -160,7 +158,7 @@ protected static XContentBuilder doXContentCommon(XContentBuilder builder, Param throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index eeb7305ac51fa..3f579947248bb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -157,8 +157,7 @@ public int hashCode() { return Objects.hash(getClass(), aggregations, score, format); } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -166,7 +165,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) builder.field(BG_COUNT, supersetDf); aggregations.toXContentInternal(builder, 
params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 8e25c164d5f33..b94b1f5ea40b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -136,7 +136,7 @@ public void setAggregations(InternalAggregations aggregations) { } @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -145,7 +145,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) } aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 7755f1db6a3ee..8047d1f06990f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -111,7 +111,7 @@ public boolean canLeadReduction() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, 0L, 0, Collections.emptyList()); + return doXContentCommon(builder, params, false, 0L, 0, Collections.emptyList()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 51796f404c283..d54ac9c66d9a5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -639,7 +639,6 @@ public void testMergeAggs() throws InterruptedException { 10000D, count, InternalAggregations.EMPTY, - false, DocValueFormat.RAW ); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); @@ -1498,15 +1497,7 @@ private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Inde private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) { Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); InternalDateRange.Factory factory = new InternalDateRange.Factory(); - InternalDateRange.Bucket bucket = factory.createBucket( - "bucket", - 0D, - 10000D, - count, - InternalAggregations.EMPTY, - false, - DocValueFormat.RAW - ); + InternalDateRange.Bucket bucket = factory.createBucket("bucket", 0D, 10000D, count, InternalAggregations.EMPTY, DocValueFormat.RAW); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, 
emptyMap()); InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index bd423999722f3..c9185fe35e677 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -137,17 +137,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -157,17 +155,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, @@ -192,17 +188,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -212,17 +206,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index c300bfed5f62a..ad2543548dcae 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -59,10 +59,9 @@ public void setUp() throws Exception { @Override protected InternalFilters createTestInstance(String name, Map metadata, InternalAggregations aggregations) { final List buckets = new ArrayList<>(); - for (int i = 0; i < keys.size(); ++i) { - String key = keys.get(i); + for (String key : keys) { int 
docCount = randomIntBetween(0, 1000); - buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed, keyedBucket)); + buckets.add(new InternalBucket(key, docCount, aggregations)); } return new InternalFilters(name, buckets, keyed, keyedBucket, metadata); } @@ -94,7 +93,7 @@ protected InternalFilters mutateInstance(InternalFilters instance) { case 0 -> name += randomAlphaOfLength(5); case 1 -> { buckets = new ArrayList<>(buckets); - buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed, keyedBucket)); + buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY)); } default -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index 9e6829139d772..5eb1500e37269 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -106,7 +106,7 @@ private InternalDateHistogram createTestInstance( // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { long key = startingDate + intervalMillis * i; - buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations)); + buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -181,13 +181,7 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalDateHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index db93bc5dfe179..f97a836712e36 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -74,7 +74,7 @@ protected InternalHistogram createTestInstance(String name, Map // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { final int docCount = TestUtil.nextInt(random(), 1, 50); - buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations)); + buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -96,7 +96,7 @@ public void testHandlesNaN() { newBuckets.addAll(buckets.subList(0, buckets.size() - 1)); } InternalHistogram.Bucket b = buckets.get(buckets.size() - 1); - newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); + newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, b.format, 
b.aggregations)); List reduceMe = List.of(histogram, histogram2); InternalAggregationTestCase.reduce(reduceMe, mockReduceContext(mockBuilder(reduceMe)).forPartialReduction()); @@ -171,13 +171,7 @@ protected InternalHistogram mutateInstance(InternalHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java index 5ca78f322491b..dc5b57619676e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java @@ -75,16 +75,7 @@ private InternalIpPrefix createTestInstance( BytesRef key = itr.next(); boolean v6 = InetAddressPoint.decode(key.bytes) instanceof Inet6Address; buckets.add( - new InternalIpPrefix.Bucket( - DocValueFormat.IP, - key, - keyed, - v6, - prefixLength, - appendPrefixLength, - randomLongBetween(0, Long.MAX_VALUE), - aggregations - ) + new InternalIpPrefix.Bucket(key, v6, prefixLength, appendPrefixLength, randomLongBetween(0, Long.MAX_VALUE), aggregations) ); } @@ -126,7 +117,6 @@ protected void assertReduced(InternalIpPrefix reduced, List in Map expectedCounts = new HashMap<>(); for (InternalIpPrefix i : inputs) { for (InternalIpPrefix.Bucket b : i.getBuckets()) { - assertThat(b.getFormat(), equalTo(DocValueFormat.IP)); long acc = expectedCounts.getOrDefault(b.getKey(), 0L); acc += b.getDocCount(); expectedCounts.put(b.getKey(), acc); @@ -146,20 +136,16 @@ protected void assertReduced(InternalIpPrefix reduced, List in public void testPartialReduceNoMinDocCount() { InternalIpPrefix.Bucket b1 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), false, - false, 1, false, 1, InternalAggregations.EMPTY ); InternalIpPrefix.Bucket b2 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("200.0.0.1"))), false, - false, 1, false, 2, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index b888e61e1bbf9..383065193c4d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -72,7 +72,7 @@ protected InternalBinaryRange createTestInstance( for (int i = 0; i < ranges.size(); ++i) { final int docCount = randomIntBetween(1, 100); final String key = (i == nullKey) ? 
null : randomAlphaOfLength(10); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); + buckets.add(new InternalBinaryRange.Bucket(format, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); } @@ -113,7 +113,6 @@ protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) { buckets.add( new InternalBinaryRange.Bucket( format, - keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)), new BytesRef(randomAlphaOfLengthBetween(1, 20)), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index 255ad7c4417b3..fdfffaf8fb8e7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -81,7 +81,7 @@ protected InternalDateRange createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalDateRange(name, buckets, format, keyed, metadata); } @@ -105,9 +105,7 @@ protected InternalDateRange mutateInstance(InternalDateRange instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 49144ec2f40fb..dcb41322a9426 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -63,7 +63,7 @@ protected InternalGeoDistance createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed)); + buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations)); } return new InternalGeoDistance(name, buckets, keyed, metadata); } @@ -86,9 +86,7 @@ protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false) - ); + buckets.add(new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index da0fbd94d6ed6..0d957255b6416 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalRange<>(name, buckets, format, keyed, metadata); } @@ -100,9 +100,7 @@ protected Class interna buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index 9f667b3efcb61..b2f79c02baf8d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -81,11 +81,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return new Object[0]; @@ -161,11 +156,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return mock(InternalTDigestPercentiles.class); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 0f732d2017c74..c6bfb5b1b2778 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -122,17 +122,16 @@ public InternalAggregations getAggregations() { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), getKey()); builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - if (getShowDocCountError()) { + if (showDocCountError) { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); } aggregations.toXContentInternal(builder, params); builder.endObject(); - 
return builder; } @Override @@ -589,7 +588,7 @@ public List getBuckets() { @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 7ef7a8f4e6dd5..95b6a18182f9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -142,8 +142,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.DOC_COUNT.getPreferredName(), serializableCategory.getNumMatches()); builder.field(CommonFields.KEY.getPreferredName()); @@ -152,7 +151,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName(), serializableCategory.maxMatchingStringLen()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } BucketKey getRawKey() { @@ -280,7 +278,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java index c97166ac6fd80..39bdb69d4da40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java @@ -12,12 +12,13 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket { +public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket implements ToXContent { private final Object key; private final long docCount; private final InternalAggregations aggregations; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 561076c302eda..1604c47ac4754 
100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -36,7 +36,7 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - return new InternalHistogram.Bucket(timestamp, docCount, false, DocValueFormat.RAW, createAggs(subAggregations)); + return new InternalHistogram.Bucket(timestamp, docCount, DocValueFormat.RAW, createAggs(subAggregations)); } static InternalComposite.InternalBucket createCompositeBucket( diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba25a774ff540..e33c1cc30f355 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -444,20 +444,14 @@ private static InternalAggregation unrollMultiBucket( long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalDateHistogram.Bucket( - key, - bucketCount, - ((InternalDateHistogram.Bucket) bucket).getKeyed(), - formatter, - subAggs - ); + return new InternalDateHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof InternalHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalHistogram.Bucket(key, bucketCount, ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); + return new InternalHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 411a4cda868f0..f9fed2b8f6a7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -360,11 +359,6 @@ static class ImplicitGroupActionListener extends BaseAggActionListener { private static final List EMPTY_BUCKET = singletonList(new Bucket() { - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new 
SqlIllegalArgumentException("No group-by/aggs defined"); - } - @Override public Object getKey() { throw new SqlIllegalArgumentException("No group-by/aggs defined"); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java index cb832cbd4b2d4..8f8f5917ae123 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java @@ -8,9 +8,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; import java.util.Map; class TestBucket implements Bucket { @@ -25,11 +23,6 @@ class TestBucket implements Bucket { this.aggs = aggs; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public Map getKey() { return key; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 681ec38e9a57a..7359071996cc8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -918,14 +918,14 @@ public void testRangeAggExtractor() { Aggregation agg = createRangeAgg( "p_agg", List.of( - new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, false, DocValueFormat.RAW) + new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket("custom-0", 0, 10, 777, 
InternalAggregations.EMPTY, DocValueFormat.RAW) ) ); assertThat( From 546e8e91e6f6f5cd5e5343a0874239026701fff4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Fri, 22 Nov 2024 08:20:35 +0100 Subject: [PATCH 162/386] Bump major version for feature migration system indices (#117243) * Bump major version for feature upgrade system indices --- docs/changelog/117243.yaml | 5 +++++ .../elasticsearch/migration/FeatureMigrationIT.java | 8 ++++---- .../migration/MultiFeatureMigrationIT.java | 10 +++++----- .../TransportGetFeatureUpgradeStatusAction.java | 8 ++------ .../java/org/elasticsearch/indices/SystemIndices.java | 2 +- 5 files changed, 17 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/117243.yaml diff --git a/docs/changelog/117243.yaml b/docs/changelog/117243.yaml new file mode 100644 index 0000000000000..f871d476bd0ec --- /dev/null +++ b/docs/changelog/117243.yaml @@ -0,0 +1,5 @@ +pr: 117243 +summary: Bump major version for feature migration system indices +area: Infra/Core +type: upgrade +issues: [] diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 3905edae46c2f..a4aa0514bb47a 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -208,7 +208,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -216,7 +216,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -224,7 +224,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -232,7 +232,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 1ee5519593569..3442e9dc43925 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -218,7 +218,7 @@ public void testMultipleFeatureMigration() throws Exception { // Finally, verify that all the indices exist and have the properties we expect. 
assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -226,7 +226,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -234,7 +234,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -242,7 +242,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, @@ -251,7 +251,7 @@ public void testMultipleFeatureMigration() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".second-int-man-old-reindexed-for-8", + ".second-int-man-old-reindexed-for-9", SECOND_FEATURE_IDX_FLAG_VALUE, true, true, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index e2475bca31d53..afe615add28df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndices; @@ -56,15 +55,13 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA /** * Once all feature migrations for 8.x -> 9.x have been tested, we can bump this to Version.V_8_0_0 */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; + public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_8_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; @Inject - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // Once we begin working on 9.x, we need to update our migration classes public TransportGetFeatureUpgradeStatusAction( TransportService transportService, ThreadPool threadPool, @@ -149,7 +146,6 @@ static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus getFeatureUpgradeSta .map(idxInfo -> ERROR) .map(idxStatus -> GetFeatureUpgradeStatusResponse.UpgradeStatus.combine(idxStatus, initialStatus)) .orElse(initialStatus); - return new GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus(featureName, minimumVersion, status, indexInfos); } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 856b30d1c19e8..42cda4da1a9e6 100644 
--- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -110,7 +110,7 @@ public class SystemIndices { public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed"; public static final String EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_external_system_index_access_origin"; - public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-8"; + public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-9"; private static final Automaton EMPTY = Automata.makeEmpty(); From 2ac267de3a0f14f62d426ae47b4dc1adc1fe9161 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Fri, 22 Nov 2024 10:02:03 +0100 Subject: [PATCH 163/386] [ci] Add debian-12 to matrix in packaging and platform jobs (#116172) Lintian test has been changed to parse the result instead of using exit code. This was required, because now `mismatched-override` is non-erasable tag which cannot be ignored for exit code. Lintian introduced non-backward-compatible format change for overrides file. Because of that, some overrides are now duplicated in a format for older versions. Additionally, Lintian overrides file has been cleaned up to remove the tags which are no longer failing. --- .../pipelines/periodic-packaging.template.yml | 1 + .buildkite/pipelines/periodic-packaging.yml | 1 + .../pipelines/periodic-platform-support.yml | 1 + .../pull-request/packaging-tests-unix.yml | 3 + distribution/packages/build.gradle | 1 - .../packages/src/deb/lintian/elasticsearch | 57 ++++++++++--------- .../packaging/test/DebMetadataTests.java | 40 ++++++++++--- .../packaging/util/LintianResultParser.java | 54 ++++++++++++++++++ 8 files changed, 122 insertions(+), 36 deletions(-) create mode 100644 qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index 081d059460653..1a1e46d55f7a4 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -8,6 +8,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 162a7e4995467..a49e486176484 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -9,6 +9,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index f9f75488f0917..79e5a2e8dcdbb 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -8,6 +8,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 04ccc41891b3b..8bec706bb758d 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -11,6 +11,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -38,6 +39,7 @@ steps: setup: image: 
- debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -65,6 +67,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 918980fea616a..7d60137ac86b1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -335,7 +335,6 @@ Closure commonDebConfig(String architecture) { // versions found on oldest supported distro, centos-6 requires('bash', '4.1', GREATER | EQUAL) - requires('lsb-base', '4', GREATER | EQUAL) requires 'libc6' requires 'adduser' diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index edd705b66caaa..1622d8d8aeb40 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -5,8 +5,6 @@ changelog-file-missing-in-native-package # we intentionally copy our copyright file for all deb packages -copyright-file-contains-full-apache-2-license -copyright-not-using-common-license-for-apache2 copyright-without-copyright-notice # we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages @@ -16,37 +14,23 @@ arch-dependent-file-in-usr-share missing-dep-on-jarwrapper # we prefer to not make our config and log files world readable -non-standard-file-perm etc/default/elasticsearch 0660 != 0644 -non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 -non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 -non-standard-file-perm etc/elasticsearch/* -non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 -non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 - -# this lintian tag is simply wrong; contrary to the explanation, Debian systemd -# does actually look at /usr/lib/systemd/system -systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service +non-standard-file-perm 0660 != 0644 [etc/default/elasticsearch] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/jvm.options.d/] +non-standard-file-perm 0660 != 0644 [etc/elasticsearch/*] +non-standard-dir-perm 2750 != 0755 [var/lib/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [var/log/elasticsearch/] # the package scripts handle systemd directly and don't need to use deb helpers maintainer-script-calls-systemctl # bundled JDK embedded-library -unstripped-binary-or-object usr/share/elasticsearch/jdk/* -extra-license-file usr/share/elasticsearch/jdk/legal/* -hardening-no-pie usr/share/elasticsearch/jdk/bin/* -hardening-no-pie usr/share/elasticsearch/jdk/lib/* +unstripped-binary-or-object [usr/share/elasticsearch/jdk/*] # the system java version that lintian assumes is far behind what elasticsearch uses unknown-java-class-version -# elastic licensed modules contain elastic license -extra-license-file usr/share/elasticsearch/modules/* - -# This dependency appears to have a packaging flaw, and includes a -# generated source file alongside the compiled version -jar-contains-source usr/share/elasticsearch/modules/repository-gcs/api-common*.jar * - # There's no `License` field in Debian control files, but earlier versions # of `lintian` were more permissive. Override this warning so that we can # run `lintian` on different releases of Debian. The format of this override @@ -58,8 +42,27 @@ unknown-field License # indirectly to libc via libdl. 
This might not be best practice but we # don't build them ourselves and the license precludes us modifying them # to fix this. -library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so +library-not-linked-against-libc [usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*] + + +# Below is the copy of some of the above rules in format for Lintian versions <= 2.104 (Debian 11) +# Override syntax changes between Lintian versions in a non-backwards compatible way, so we handle it with +# duplication and ignoring some issues in the test code. + + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/elasticsearch 0660 != 0644 +non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 +non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 +non-standard-file-perm etc/elasticsearch/* +non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 +non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 -# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so -# which has no dependencies (not even libc) besides the symbols in the base executable. -shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so +# bundled JDK +unstripped-binary-or-object usr/share/elasticsearch/jdk/* + +# Intel MKL libraries are not linked directly to libc. They are linked +# indirectly to libc via libdl. This might not be best practice but we +# don't build them ourselves and the license precludes us modifying them +# to fix this. +library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so* diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index a60e58c34918b..9f9aa78a4910b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -12,18 +12,31 @@ import junit.framework.TestCase; import org.elasticsearch.packaging.util.Distribution; -import org.elasticsearch.packaging.util.FileUtils; +import org.elasticsearch.packaging.util.LintianResultParser; +import org.elasticsearch.packaging.util.LintianResultParser.Issue; +import org.elasticsearch.packaging.util.LintianResultParser.Result; import org.elasticsearch.packaging.util.Shell; import org.junit.BeforeClass; +import java.util.List; import java.util.Locale; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.junit.Assume.assumeTrue; public class DebMetadataTests extends PackagingTestCase { + private final LintianResultParser lintianParser = new LintianResultParser(); + private static final List IGNORED_TAGS = List.of( + // Override syntax changes between lintian versions in a non-backwards compatible way, so we have to tolerate these. + // Tag mismatched-override is a non-erasable tag which cannot be ignored with overrides, so we handle it here. 
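+        // For reference, Lintian reports issues as lines like "E: elasticsearch: mismatched-override some detail"
+        // (severity, package, tag, message). That sample line is illustrative rather than verbatim Lintian output;
+        // LintianResultParser below is what turns the real report lines into Issue records.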
+ "mismatched-override", + // systemd-service-file-outside-lib has been incorrect and removed in the newer version on Lintian + "systemd-service-file-outside-lib" + ); + @BeforeClass public static void filterDistros() { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); @@ -35,15 +48,26 @@ public void test05CheckLintian() { if (helpText.contains("--fail-on-warnings")) { extraArgs = "--fail-on-warnings"; } else if (helpText.contains("--fail-on error")) { - extraArgs = "--fail-on warning"; - // Recent lintian versions are picky about malformed or mismatched overrides. - // Unfortunately override syntax changes between lintian versions in a non-backwards compatible - // way, so we have to tolerate these (or maintain separate override files per lintian version). - if (helpText.contains("--suppress-tags")) { - extraArgs += " --suppress-tags malformed-override,mismatched-override"; + extraArgs = "--fail-on error,warning"; + } + Shell.Result result = sh.runIgnoreExitCode( + String.format(Locale.ROOT, "lintian %s %s", extraArgs, getDistributionFile(distribution())) + ); + Result lintianResult = lintianParser.parse(result.stdout()); + // Unfortunately Lintian overrides syntax changes between Lintian versions in a non-backwards compatible + // way, so we have to manage some exclusions outside the overrides file. + if (lintianResult.isSuccess() == false) { + List importantIssues = lintianResult.issues() + .stream() + .filter(issue -> IGNORED_TAGS.contains(issue.tag()) == false) + .toList(); + if (importantIssues.isEmpty() == false) { + fail( + "Issues for DEB package found by Lintian:\n" + + importantIssues.stream().map(Record::toString).collect(Collectors.joining("\n")) + ); } } - sh.run(String.format(Locale.ROOT, "lintian %s %s", extraArgs, FileUtils.getDistributionFile(distribution()))); } public void test06Dependencies() { diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java new file mode 100644 index 0000000000000..511080427ea77 --- /dev/null +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.packaging.util; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class LintianResultParser { + + private static final Logger logger = LogManager.getLogger(LintianResultParser.class); + private static final Pattern RESULT_PATTERN = Pattern.compile("(?<severity>[EW]): (?<package>\\S+): (?<tag>\\S+) (?<message>.+)"); + + public Result parse(String output) { + String[] lines = output.split("\n"); + List<Issue> issues = Arrays.stream(lines).map(line -> { + Matcher matcher = RESULT_PATTERN.matcher(line); + if (matcher.matches() == false) { + logger.info("Lintian output not matching expected pattern: {}", line); + return null; + } + Severity severity = switch (matcher.group("severity")) { + case "E" -> Severity.ERROR; + case "W" -> Severity.WARNING; + default -> Severity.UNKNOWN; + }; + return new Issue(severity, matcher.group("tag"), matcher.group("message")); + }).filter(Objects::nonNull).toList(); + + return new Result(issues.stream().noneMatch(it -> it.severity == Severity.ERROR || it.severity == Severity.WARNING), issues); + } + + public record Result(boolean isSuccess, List<Issue> issues) {} + + public record Issue(Severity severity, String tag, String message) {} + + enum Severity { + ERROR, + WARNING, + UNKNOWN + } +} From 6ea3e01958cfe355475fe605a49fb731294a8c2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Fri, 22 Nov 2024 11:39:25 +0100 Subject: [PATCH 164/386] Upgrade Bouncy Castle FIPS dependencies (#112989) This PR updates `bc-fips` and `bctls-fips` dependencies to the latest minor versions. --- .../src/main/groovy/elasticsearch.fips.gradle | 8 ++++---- .../src/main/resources/fips_java.policy | 3 ++- distribution/tools/plugin-cli/build.gradle | 2 +- docs/changelog/112989.yaml | 5 +++++ .../security/fips-140-compliance.asciidoc | 4 ++-- gradle/verification-metadata.xml | 18 +++++++++--------- plugins/discovery-ec2/build.gradle | 1 + .../src/main/resources/fips/fips_java.policy | 3 ++- x-pack/plugin/core/build.gradle | 2 +- .../core/ssl/RestrictedTrustManagerTests.java | 2 +- .../ssl/SslClientAuthenticationTests.java | 2 +- ...mpleSecurityNetty4ServerTransportTests.java | 6 +++++- 12 files changed, 34 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/112989.yaml diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 493f7a505bb5b..3c9cf121813c9 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -25,12 +25,12 @@ if (buildParams.inFipsJvm) { File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks') - def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.4') - def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17') + def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5') + def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19') def manualDebug = false; //change this to manually debug bouncy castle in an IDE if(manualDebug) { - bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.4') - bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17'){ + bcFips =
dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5') + bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){ exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell } } diff --git a/build-tools-internal/src/main/resources/fips_java.policy b/build-tools-internal/src/main/resources/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/build-tools-internal/src/main/resources/fips_java.policy +++ b/build-tools-internal/src/main/resources/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index ac8ade89c9014..57750f2162a71 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -29,7 +29,7 @@ dependencies { implementation 'org.ow2.asm:asm-tree:9.7' api "org.bouncycastle:bcpg-fips:1.0.7.1" - api "org.bouncycastle:bc-fips:1.0.2.4" + api "org.bouncycastle:bc-fips:1.0.2.5" testImplementation project(":test:framework") testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}" diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml new file mode 100644 index 0000000000000..364f012f94420 --- /dev/null +++ b/docs/changelog/112989.yaml @@ -0,0 +1,5 @@ +pr: 112989 +summary: Upgrade Bouncy Castle FIPS dependencies +area: Security +type: upgrade +issues: [] diff --git a/docs/reference/security/fips-140-compliance.asciidoc b/docs/reference/security/fips-140-compliance.asciidoc index 5bf73d43541d6..dec17927e62b8 100644 --- a/docs/reference/security/fips-140-compliance.asciidoc +++ b/docs/reference/security/fips-140-compliance.asciidoc @@ -53,8 +53,8 @@ https://docs.oracle.com/en/java/javase/17/security/java-cryptography-architectur https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html[JSSE] implementation is required so that the JVM uses FIPS validated implementations of NIST recommended cryptographic algorithms. -Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.4/bc-fips-1.0.2.4.jar[bc-fips 1.0.2.4] -and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.17/bctls-fips-1.0.17.jar[bctls-fips 1.0.17]. +Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.5/bc-fips-1.0.2.5.jar[bc-fips 1.0.2.5] +and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.19/bctls-fips-1.0.19.jar[bctls-fips 1.0.19]. 
Please refer to the {es} https://www.elastic.co/support/matrix#matrix_jvm[JVM support matrix] for details on which combinations of JVM and security provider are supported in FIPS mode. Elasticsearch does not ship with a FIPS certified provider. It is the responsibility of the user to install and configure the security provider to ensure compliance with FIPS 140-2. Using a FIPS certified provider will ensure that only diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3f56071f6f495..2f465e06a662a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -3288,14 +3288,14 @@ - - - + + + - - - + + + @@ -3333,9 +3333,9 @@ - - - + + + diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a166a89ad4026..f281db5279660 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -77,6 +77,7 @@ tasks.register("writeTestJavaPolicy") { "permission java.security.SecurityPermission \"getProperty.jdk.tls.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.jdk.certpath.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.keystore.type.compat\";", + "permission java.security.SecurityPermission \"getProperty.org.bouncycastle.ec.max_f2m_field_size\";", "};" ].join("\n") ) diff --git a/test/test-clusters/src/main/resources/fips/fips_java.policy b/test/test-clusters/src/main/resources/fips/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/test/test-clusters/src/main/resources/fips/fips_java.policy +++ b/test/test-clusters/src/main/resources/fips/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index b4f17cb436df5..d4c3f67bf3ebb 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -65,7 +65,7 @@ dependencies { testImplementation project(path: ':modules:rest-root') testImplementation project(path: ':modules:health-shards-availability') // Needed for Fips140ProviderVerificationTests - testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4') + testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5') testImplementation(project(':x-pack:license-tools')) { transitive = false diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java index bbf80279b0b2a..60db8b6522518 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java @@ -218,7 +218,7 @@ public void testThatDelegateTrustManagerIsRespected() throws Exception { if (cert.endsWith("/ca")) { assertTrusted(trustManager, cert); } else { - assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to find certificate chain." : "PKIX path building failed.*"); + assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to construct a valid chain" : "PKIX path building failed.*"); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java index bc01b0693af0a..2851af1461012 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java @@ -107,7 +107,7 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { if (inFipsJvm()) { Throwable t = ExceptionsHelper.unwrap(e, CertificateException.class); assertThat(t, instanceOf(CertificateException.class)); - assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + assertThat(t.getMessage(), containsString("Unable to construct a valid chain")); } else { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index c5c5e14934408..e381663d4174e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -571,7 +571,11 @@ public void testClientChannelUsesSeparateSslConfigurationForRemoteCluster() thro final ConnectTransportException e = openConnectionExpectFailure(qcService, node, connectionProfile); assertThat( e.getRootCause().getMessage(), - anyOf(containsString("unable to find valid certification path"), containsString("Unable to find certificate chain")) + anyOf( + containsString("unable to find valid certification path"), + containsString("Unable to find certificate chain"), + containsString("Unable to construct a valid chain") + ) ); } From a1247b3e60f7fd0df1a980a1d8a7a8a0e1760661 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 22 Nov 2024 12:12:09 +0100 Subject: [PATCH 165/386] ES|QL: fix validation of SORT by aggregate functions (#117316) --- docs/changelog/117316.yaml | 5 +++ .../kibana/definition/match_operator.json | 36 +++++++++++++++++++ .../functions/types/match_operator.asciidoc | 2 ++ .../xpack/esql/analysis/Verifier.java | 13 +++++++ .../xpack/esql/analysis/VerifierTests.java | 7 ++++ 5 files changed, 63 insertions(+) create mode 100644 docs/changelog/117316.yaml diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml new file mode 100644 index 0000000000000..69474d68a8190 --- /dev/null +++ 
b/docs/changelog/117316.yaml @@ -0,0 +1,5 @@ +pr: 117316 +summary: Fix validation of SORT by aggregate functions +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json index 2facebfc44e57..7a0ace6168b59 100644 --- a/docs/reference/esql/functions/kibana/definition/match_operator.json +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -22,6 +22,42 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { + "params" : [ + { diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc index 5c6afacdce1b2..7523b29c62b1d 100644 --- a/docs/reference/esql/functions/types/match_operator.asciidoc +++ b/docs/reference/esql/functions/types/match_operator.asciidoc @@ -6,5 +6,7 @@ |=== field | query | result keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean text | text | boolean |=== diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 694328e57b5ae..3ebb52641232e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -215,6 +215,7 @@ else if (p instanceof Lookup lookup) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); checkForSortableDataTypes(p, failures); + checkSort(p, failures); checkFullTextQueryFunctions(p, failures); }); @@ -232,6 +233,18 @@ else if (p instanceof Lookup lookup) { return failures; } + private void checkSort(LogicalPlan p, Set<Failure> failures) { + if (p instanceof OrderBy ob) { + ob.order().forEach(o -> { + o.forEachDown(Function.class, f -> { + if (f instanceof AggregateFunction) { + failures.add(fail(f, "Aggregate functions are not allowed in SORT [{}]", f.functionName())); + } + }); + }); + } + } + private static void checkFilterConditionType(LogicalPlan p, Set<Failure> localFailures) { if (p instanceof Filter f) { Expression condition = f.condition(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8da6863465d39..53c6e9a5fd841 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1793,6 +1793,13 @@ public void testCategorizeWithinAggregations() { ); } + public void testSortByAggregate() { + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 |
SORT count(*)")); + assertEquals("1:28: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT to_string(count(*))")); + assertEquals("1:22: Aggregate functions are not allowed in SORT [MAX]", error("ROW a = 1 | SORT 1 + max(a)")); + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("FROM test | SORT count(*)")); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } From bcb29b797d8dcb61aca7f2beb922839277326def Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Fri, 22 Nov 2024 12:31:40 +0100 Subject: [PATCH 166/386] Preserve thread context when waiting for segment generation in RTG (#117148) The multi-get counterpart of https://github.com/elastic/elasticsearch/pull/114623. --- docs/changelog/117148.yaml | 5 +++++ .../action/get/TransportShardMultiGetAction.java | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/117148.yaml diff --git a/docs/changelog/117148.yaml b/docs/changelog/117148.yaml new file mode 100644 index 0000000000000..92dd69672616a --- /dev/null +++ b/docs/changelog/117148.yaml @@ -0,0 +1,5 @@ +pr: 117148 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 633e7ef6793ab..93e1b18ec64c6 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -280,15 +281,15 @@ private void tryShardMultiGetFromTranslog( } else { assert r.segmentGeneration() > -1L; assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), + final ActionListener<Long> termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext( listener.delegateFailureAndWrap( (ll, aLong) -> getExecutor(request, shardId).execute( ActionRunnable.supply(ll, () -> handleLocalGets(request, r.multiGetShardResponse(), shardId)) ) - ) + ), + threadPool.getThreadContext() ); + indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener); } } }), TransportShardMultiGetFomTranslogAction.Response::new, getExecutor(request, shardId)) From d26a772f6e26cead6d1d130be1298c0e67943c51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Fri, 22 Nov 2024 12:51:37 +0100 Subject: [PATCH 167/386] Unmute Categorize VerifierTests and require snapshot on them (#117016) Fixes https://github.com/elastic/elasticsearch/issues/116856 Fixes https://github.com/elastic/elasticsearch/issues/116857 Fixes https://github.com/elastic/elasticsearch/issues/116858 --- muted-tests.yml | 9 --------- .../elasticsearch/xpack/esql/analysis/VerifierTests.java | 6 ++++++ 2 files changed, 6 insertions(+), 9
deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index b88bff86a0fbe..8be390e670c9b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -208,15 +208,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/116777 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeWithinAggregations - issue: https://github.com/elastic/elasticsearch/issues/116856 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeSingleGrouping - issue: https://github.com/elastic/elasticsearch/issues/116857 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeNestedGrouping - issue: https://github.com/elastic/elasticsearch/issues/116858 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 53c6e9a5fd841..7b2f85b80b3b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1732,6 +1732,8 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1759,6 +1761,8 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1772,6 +1776,8 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); assertEquals( From 1fe3ed1e850c12d21806061e53687c0f1bd96738 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 22 Nov 2024 13:26:30 +0100 Subject: [PATCH 168/386] Add docs for aggs filtering (#116681) Add documentation for aggs filtering (the WHERE in STATS command). Fixes: #115083 --- .../esql/processing-commands/stats.asciidoc | 49 +++++++++++++++---- .../src/main/resources/stats.csv-spec | 36 ++++++++++++++ 2 files changed, 75 insertions(+), 10 deletions(-) diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index 0c479c1f62b76..3ed296fb6db24 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -1,16 +1,18 @@ [discrete] [[esql-stats-by]] -=== `STATS ... BY` +=== `STATS` -The `STATS ... BY` processing command groups rows according to a common value +The `STATS` processing command groups rows according to a common value and calculates one or more aggregated values over the grouped rows. 
**Syntax** [source,esql] ---- -STATS [column1 =] expression1[, ..., [columnN =] expressionN] -[BY grouping_expression1[, ..., grouping_expressionN]] +STATS [column1 =] expression1 [WHERE boolean_expression1][, + ..., + [columnN =] expressionN [WHERE boolean_expressionN]] + [BY grouping_expression1[, ..., grouping_expressionN]] ---- *Parameters* @@ -28,14 +30,18 @@ An expression that computes an aggregated value. An expression that outputs the values to group by. If its name coincides with one of the computed columns, that column will be ignored. +`boolean_expressionX`:: +The condition that must be met for a row to be included in the evaluation of `expressionX`. + NOTE: Individual `null` values are skipped when computing aggregations. *Description* -The `STATS ... BY` processing command groups rows according to a common value -and calculate one or more aggregated values over the grouped rows. If `BY` is -omitted, the output table contains exactly one row with the aggregations applied -over the entire dataset. +The `STATS` processing command groups rows according to a common value +and calculates one or more aggregated values over the grouped rows. For the +calculation of each aggregated value, the rows in a group can be filtered with +`WHERE`. If `BY` is omitted, the output table contains exactly one row with +the aggregations applied over the entire dataset. The following <> are supported: @@ -90,6 +96,29 @@ include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues] include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result] |=== +To filter the rows that go into an aggregation, use the `WHERE` clause: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFiltering] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFiltering-result] +|=== + +The aggregations can be mixed, with and without a filter and grouping is +optional as well: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup-result] +|=== + [[esql-stats-mv-group]] If the grouping key is multivalued then the input row is in all groups: @@ -109,7 +138,7 @@ It's also possible to group by multiple values: include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues] ---- -If the all grouping keys are multivalued then the input row is in all groups: +If all the grouping keys are multivalued then the input row is in all groups: [source.merge.styled,esql] ---- @@ -121,7 +150,7 @@ include::{esql-specs}/stats.csv-spec[tag=multi-mv-group-result] |=== Both the aggregating functions and the grouping expressions accept other -functions. This is useful for using `STATS...BY` on multivalue columns. +functions. This is useful for using `STATS` on multivalue columns. 
For example, to calculate the average salary change, you can use `MV_AVG` to first average the multiple values per employee, and use the result with the `AVG` function: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index cba5ace0dfe86..66c5362a24134 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2348,6 +2348,42 @@ v:integer | job_positions:keyword 10094 | Accountant ; +docsStatsWithSimpleFiltering +required_capability: per_agg_filtering +// tag::aggFiltering[] +FROM employees +| STATS avg50s = AVG(salary)::LONG WHERE birth_date < "1960-01-01", + avg60s = AVG(salary)::LONG WHERE birth_date >= "1960-01-01" + BY gender +| SORT gender +// end::aggFiltering[] +| WHERE gender IS NOT NULL +; + +// tag::aggFiltering-result[] +avg50s:long |avg60s:long |gender:keyword +55462 |46637 |F +48279 |44879 |M +// end::aggFiltering-result[] +; +docsStatsWithFilteringNoGroups +required_capability: per_agg_filtering +// tag::aggFilteringNoGroup[] +FROM employees +| EVAL Ks = salary / 1000 // thousands +| STATS under_40K = COUNT(*) WHERE Ks < 40, + inbetween = COUNT(*) WHERE 40 <= Ks AND Ks < 60, + over_60K = COUNT(*) WHERE 60 <= Ks, + total = COUNT(*) +// end::aggFilteringNoGroup[] +; + +// tag::aggFilteringNoGroup-result[] +under_40K:long |inbetween:long |over_60K:long |total:long +36 |39 |25 |100 +// end::aggFilteringNoGroup-result[] +; statsWithFiltering required_capability: per_agg_filtering From 7ac8d6392fb5bdac6330381a81ace8a17759b48b Mon Sep 17 00:00:00 2001 From: Jack Pan <35284546+jackpan123@users.noreply.github.com> Date: Fri, 22 Nov 2024 20:52:57 +0800 Subject: [PATCH 169/386] ESQL: Fix double lookup and HashJoinExec.addedFields (#115616) Fix a bug in HashJoinExec.addedFields that caused multiple LOOKUPs in a query to fail. --- docs/changelog/115616.yaml | 6 ++++++ .../xpack/esql/plan/physical/HashJoinExec.java | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115616.yaml diff --git a/docs/changelog/115616.yaml b/docs/changelog/115616.yaml new file mode 100644 index 0000000000000..4fb4dc18538de --- /dev/null +++ b/docs/changelog/115616.yaml @@ -0,0 +1,6 @@ +pr: 115616 +summary: Fix double lookup failure on ESQL +area: ES|QL +type: bug +issues: + - 111398 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 4574c3720f8ee..5ae3702993fcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -91,7 +91,7 @@ public List<Attribute> rightFields() { public Set<Attribute> addedFields() { if (lazyAddedFields == null) { - lazyAddedFields = outputSet(); + lazyAddedFields = new AttributeSet(output()); lazyAddedFields.removeAll(left().output()); } return lazyAddedFields; From f9223531ac5fab80bd4c7204eee8df409906f6e3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Nov 2024 13:14:26 +0000 Subject: [PATCH 170/386] Enable test-fixture test suites (#117329) Today the `:test:fixtures` modules' test suites are disabled, but in fact these fixtures do have nontrivial behaviour that wants testing in its own right, so we should run their tests.
This commit reinstates the disabled tests and fixes one which should have been fixed as part of #116212. --- test/fixtures/build.gradle | 9 --------- .../src/test/java/fixture/s3/S3HttpHandlerTests.java | 12 ++++++++++-- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/test/fixtures/build.gradle b/test/fixtures/build.gradle index 02d68517903a3..e69de29bb2d1d 100644 --- a/test/fixtures/build.gradle +++ b/test/fixtures/build.gradle @@ -1,9 +0,0 @@ - -subprojects { - // fixtures don't have tests, these are external projects used by the build - pluginManager.withPlugin('java') { - tasks.named('test').configure { - enabled = false - } - } -} diff --git a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java index 375f428f748e6..58f32292fa91c 100644 --- a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java +++ b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java @@ -31,6 +31,8 @@ import java.util.List; import java.util.Objects; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; public class S3HttpHandlerTests extends ESTestCase { @@ -261,7 +263,7 @@ public void testListAndAbortMultipartUpload() { path/blob10000false\ """), handleRequest(handler, "GET", "/bucket/?uploads&prefix=path/blob")); - assertEquals(RestStatus.NOT_FOUND, handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" + final var completeUploadResponse = handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" @@ -272,7 +274,13 @@ public void testListAndAbortMultipartUpload() { %s 2 - """, part1Etag, part2Etag)).status()); + """, part1Etag, part2Etag)); + if (completeUploadResponse.status() == RestStatus.OK) { + // possible, but rare, indicating that S3 started processing the upload before returning an error + assertThat(completeUploadResponse.body().utf8ToString(), allOf(containsString(""), containsString("NoSuchUpload"))); + } else { + assertEquals(RestStatus.NOT_FOUND, completeUploadResponse.status()); + } } private static String getUploadId(BytesReference createUploadResponseBody) { From 09a53388cc7713d8d9fdca44bc50db7fd049ab45 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 22 Nov 2024 14:48:47 +0100 Subject: [PATCH 171/386] ESQL: drop RowExec (#117133) Drop `RowExec` physical node: `Row` is now optimised away into a `LocalRelation`, which has its own physical mapping. `Row` is kept around as a container for the logical optimisations/folding of the expressions supported by the `ROW` command (which makes it in fact a source _plus_ `EVAL`), `LocalRelation` only being a container for the schema and end results (it doesn't actually go through transformations). 
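As a rough illustration of the folding idea — a standalone, hypothetical sketch using simplified stand-in types, not the actual ESQL plan classes (the real logic is the ReplaceRowAsLocalRelation rule added in this patch) — replacing a constant-only ROW amounts to evaluating each expression once and keeping only the schema plus the resulting values:

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical stand-ins for the plan nodes; names here are illustrative only.
public class RowFoldingSketch {

    // A "local relation" reduced to column name -> precomputed constant.
    record LocalRelation(Map<String, Object> row) {}

    // Folding a ROW: evaluate every constant expression once, up front.
    static LocalRelation foldRow(Map<String, Object> fields) {
        return new LocalRelation(new LinkedHashMap<>(fields));
    }

    public static void main(String[] args) {
        // Conceptually: ROW a = 1, b = 2 * 3 folds to the one-row relation {a=1, b=6}.
        Map<String, Object> fields = new LinkedHashMap<>();
        fields.put("a", 1);
        fields.put("b", 2 * 3);
        System.out.println(foldRow(fields).row()); // prints {a=1, b=6}
    }
}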
Fixes #104960 --- .../compute/operator/RowOperator.java | 47 ----------- .../compute/operator/RowOperatorTests.java | 81 ------------------- .../esql/optimizer/LogicalPlanOptimizer.java | 3 +- .../logical/ReplaceRowAsLocalRelation.java | 30 +++++++ .../xpack/esql/plan/PlanWritables.java | 2 - .../xpack/esql/plan/physical/RowExec.java | 75 ----------------- .../esql/planner/LocalExecutionPlanner.java | 11 --- .../esql/planner/mapper/MapperUtils.java | 19 ----- .../optimizer/LogicalPlanOptimizerTests.java | 12 +-- .../physical/RowExecSerializationTests.java | 51 ------------ 10 files changed, 38 insertions(+), 293 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java delete mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java deleted file mode 100644 index 4b4379eb6a4d8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.data.BlockFactory; - -import java.util.List; -import java.util.Objects; - -import static java.util.stream.Collectors.joining; - -public class RowOperator extends LocalSourceOperator { - - private final List<Object> objects; - - public record RowOperatorFactory(List<Object> objects) implements SourceOperatorFactory { - - @Override - public SourceOperator get(DriverContext driverContext) { - return new RowOperator(driverContext.blockFactory(), objects); - } - - @Override - public String describe() { - return "RowOperator[objects = " + objects.stream().map(Objects::toString).collect(joining(",")) + "]"; - } - } - - public RowOperator(BlockFactory blockFactory, List<Object> objects) { - super(blockFactory, () -> objects); - this.objects = objects; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(this.getClass().getSimpleName()).append("["); - sb.append("objects=").append(objects); - sb.append("]"); - return sb.toString(); - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java deleted file mode 100644 index cd8a49939fbb5..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class RowOperatorTests extends ESTestCase { - final DriverContext driverContext = new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - TestBlockFactory.getNonBreakingInstance() - ); - - public void testBoolean() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(false)); - assertThat(factory.describe(), equalTo("RowOperator[objects = false]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[false]]")); - BooleanBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBoolean(0), equalTo(false)); - } - - public void testInt() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(213)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 213]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[213]]")); - IntBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getInt(0), equalTo(213)); - } - - public void testLong() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(21321343214L)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 21321343214]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[21321343214]]")); - LongBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getLong(0), equalTo(21321343214L)); - } - - public void testDouble() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(2.0)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 2.0]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[2.0]]")); - DoubleBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getDouble(0), equalTo(2.0)); - } - - public void testString() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(new BytesRef("cat"))); - assertThat(factory.describe(), equalTo("RowOperator[objects = [63 61 74]]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[[63 61 74]]]")); - BytesRefBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBytesRef(0, new BytesRef()), equalTo(new BytesRef("cat"))); - } - - public void testNull() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(Arrays.asList(new Object[] { null })); - assertThat(factory.describe(), 
equalTo("RowOperator[objects = null]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[null]]")); - Block block = factory.get(driverContext).getOutput().getBlock(0); - assertTrue(block.isNull(0)); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 5007b011092f0..a5f97cf961378 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceLimitAndSortAsTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceOrderByExpressionWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRowAsLocalRelation; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceStatsFilteredAggWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceTrivialTypeConversions; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SetAsOptimized; @@ -192,6 +193,6 @@ protected static Batch operators() { } protected static Batch cleanup() { - return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN()); + return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN(), new ReplaceRowAsLocalRelation()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java new file mode 100644 index 0000000000000..eebeb1dc14f48 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; + +import java.util.ArrayList; +import java.util.List; + +public final class ReplaceRowAsLocalRelation extends OptimizerRules.OptimizerRule<Row> { + + @Override + protected LogicalPlan rule(Row row) { + var fields = row.fields(); + List<Object> values = new ArrayList<>(fields.size()); + fields.forEach(f -> values.add(f.child().fold())); + var blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); + return new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java index 40b91beaee3eb..b3c273cbfa1bb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -106,7 +105,6 @@ public static List<NamedWriteableRegistry.Entry> phsyical() { MvExpandExec.ENTRY, OrderExec.ENTRY, ProjectExec.ENTRY, - RowExec.ENTRY, ShowExec.ENTRY, SubqueryExec.ENTRY, TopNExec.ENTRY diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java deleted file mode 100644 index 3a104d4bc292b..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class RowExec extends LeafExec { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(PhysicalPlan.class, "RowExec", RowExec::new); - - private final List<Alias> fields; - - public RowExec(Source source, List<Alias> fields) { - super(source); - this.fields = fields; - } - - private RowExec(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readCollectionAsList(Alias::new)); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeCollection(fields()); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - public List<Alias> fields() { - return fields; - } - - @Override - public List<Attribute> output() { - return Expressions.asAttributes(fields); - } - - @Override - protected NodeInfo<RowExec> info() { - return NodeInfo.create(this, RowExec::new, fields); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RowExec constant = (RowExec) o; - return Objects.equals(fields, constant.fields); - } - - @Override - public int hashCode() { - return Objects.hash(fields); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 1096c917fed4f..1ffc652e54337 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -31,7 +31,6 @@ import org.elasticsearch.compute.operator.Operator.OperatorFactory; import org.elasticsearch.compute.operator.OutputOperator.OutputOperatorFactory; import org.elasticsearch.compute.operator.RowInTableLookupOperator; -import org.elasticsearch.compute.operator.RowOperator.RowOperatorFactory; import org.elasticsearch.compute.operator.ShowOperator; import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SinkOperator.SinkOperatorFactory; @@ -89,7 +88,6 @@ import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -220,8 +218,6 @@ else if (node instanceof EsQueryExec esQuery) { return planEsQueryNode(esQuery, context); } else if (node instanceof EsStatsQueryExec statsQuery) { return planEsStats(statsQuery, context); - } else if (node instanceof RowExec row) { -
return planRow(row, context); } else if (node instanceof LocalSourceExec localSource) { return planLocal(localSource, context); } else if (node instanceof ShowExec show) { @@ -620,13 +616,6 @@ private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } - private PhysicalOperation planRow(RowExec row, LocalExecutionPlannerContext context) { - List<Object> obj = row.fields().stream().map(f -> f.child().fold()).toList(); - Layout.Builder layout = new Layout.Builder(); - layout.append(row.output()); - return PhysicalOperation.fromSource(new RowOperatorFactory(obj), layout.build()); - } - private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) { Layout.Builder layout = new Layout.Builder(); layout.append(localSourceExec.output()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index ea21943aced9b..e881eabb38c43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -9,10 +9,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -27,10 +24,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -45,9 +40,7 @@ import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; -import java.util.ArrayList; import java.util.List; /** @@ -57,18 +50,6 @@ class MapperUtils { private MapperUtils() {} static PhysicalPlan mapLeaf(LeafPlan p) { - if (p instanceof Row row) { - // return new RowExec(row.source(), row.fields()); - // convert row into local relation - List<Alias> fields = row.fields(); - List<Object> values = new ArrayList<>(fields.size()); - for (Alias field : fields) { - values.add(field.child().fold()); - } - Block[] blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); - p = new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); - } - if (p instanceof LocalRelation local) { return new LocalSourceExec(local.source(), local.output(), local.supplier()); } diff --git
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 96951ee15d48b..a11a9cef82989 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -2141,7 +2141,7 @@ public void testLimitThenSortBeforeMvExpand() { mvExpand = as(topN.child(), MvExpand.class); var limit = as(mvExpand.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(7300)); - as(limit.child(), Row.class); + as(limit.child(), LocalRelation.class); } /** @@ -2286,7 +2286,7 @@ public void testSortMvExpand() { var expand = as(plan, MvExpand.class); assertThat(expand.limit(), equalTo(1000)); var topN = as(expand.child(), TopN.class); - var row = as(topN.child(), Row.class); + var row = as(topN.child(), LocalRelation.class); } /** @@ -2327,7 +2327,7 @@ public void testWhereMvExpand() { assertThat(expand.limit(), equalTo(1000)); var limit2 = as(expand.child(), Limit.class); assertThat(limit2.limit().fold(), is(1000)); - var row = as(limit2.child(), Row.class); + var row = as(limit2.child(), LocalRelation.class); } private static List<String> orderNames(TopN topN) { @@ -3545,7 +3545,7 @@ public void testMvExpandFoldable() { var filterProp = ((GreaterThan) filter.condition()).left(); assertTrue(expand.expanded().semanticEquals(filterProp)); assertFalse(expand.target().semanticEquals(filterProp)); - var row = as(expand.child(), Row.class); + var row = as(expand.child(), LocalRelation.class); } /** @@ -3564,7 +3564,7 @@ public void testRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** @@ -3583,7 +3583,7 @@ public void testMultipleRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a", "b")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java deleted file mode 100644 index 3dd44cd20e369..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.LiteralTests; -import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.tree.SourceTests; - -import java.io.IOException; -import java.util.List; - -public class RowExecSerializationTests extends AbstractPhysicalPlanSerializationTests { - public static RowExec randomRowExec() { - Source source = randomSource(); - List fields = randomList(1, 10, RowExecSerializationTests::randomAlias); - return new RowExec(source, fields); - } - - private static Alias randomAlias() { - Source source = SourceTests.randomSource(); - String name = randomAlphaOfLength(5); - Expression child = LiteralTests.randomLiteral(); - boolean synthetic = randomBoolean(); - return new Alias(source, name, child, new NameId(), synthetic); - } - - @Override - protected RowExec createTestInstance() { - return randomRowExec(); - } - - @Override - protected RowExec mutateInstance(RowExec instance) throws IOException { - List fields = instance.fields(); - fields = randomValueOtherThan(fields, () -> randomList(1, 10, RowExecSerializationTests::randomAlias)); - return new RowExec(instance.source(), fields); - } - - @Override - protected boolean alwaysEmptySource() { - return true; - } -} From bd18787af5a21d3bc95b356e713f2014498d842f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3%B3zala?= <377355+jozala@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:55:25 +0100 Subject: [PATCH 172/386] Change default container image to be based on UBI minimal instead of Ubuntu (#116739) Previously the default Docker image was based on Ubuntu. This changes the base image of the default distribution to UBI minimal. 
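(A minimal before/after sketch of what this switch means for the generated image. The RUN lines are adapted from the Dockerfile template changes in this patch; the exact package set and the trailing clean step are illustrative, not the literal build output.)

# Before: Ubuntu base, packages installed via apt-get
FROM ubuntu:20.04
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y curl

# After: UBI minimal base, packages installed via microdnf
FROM docker.elastic.co/ubi8/ubi-minimal:latest
RUN microdnf install -y findutils tar gzip && microdnf clean all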
--- .../pull-request/packaging-tests-unix.yml | 67 ++--------------- .../gradle/internal/DockerBase.java | 4 +- distribution/docker/README.md | 12 +-- distribution/docker/build.gradle | 12 ++- distribution/docker/src/docker/Dockerfile | 73 ++----------------- .../ubi-docker-aarch64-export/build.gradle | 2 - .../docker/ubi-docker-export/build.gradle | 2 - .../packaging/test/DockerTests.java | 26 +++---- .../test/KeystoreManagementTests.java | 2 +- .../packaging/test/PackagingTestCase.java | 6 +- .../packaging/util/Distribution.java | 5 +- .../packaging/util/docker/DockerRun.java | 1 - settings.gradle | 2 - 13 files changed, 37 insertions(+), 177 deletions(-) delete mode 100644 distribution/docker/ubi-docker-aarch64-export/build.gradle delete mode 100644 distribution/docker/ubi-docker-export/build.gradle diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 8bec706bb758d..ffc1350aceab3 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -3,65 +3,9 @@ config: steps: - group: packaging-tests-unix steps: - - label: "{{matrix.image}} / docker / packaging-tests-unix" - key: "packaging-tests-unix-docker" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess - timeout_in_minutes: 300 - matrix: - setup: - image: - - debian-11 - - debian-12 - - opensuse-leap-15 - - oraclelinux-7 - - oraclelinux-8 - - sles-12 - - sles-15 - - ubuntu-1804 - - ubuntu-2004 - - ubuntu-2204 - - rocky-8 - - rocky-9 - - rhel-7 - - rhel-8 - - rhel-9 - - almalinux-8 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - diskSizeGb: 350 - machineType: custom-16-32768 - - label: "{{matrix.image}} / packages / packaging-tests-unix" - key: "packaging-tests-unix-packages" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.packages - timeout_in_minutes: 300 - matrix: - setup: - image: - - debian-11 - - debian-12 - - opensuse-leap-15 - - oraclelinux-7 - - oraclelinux-8 - - sles-12 - - sles-15 - - ubuntu-1804 - - ubuntu-2004 - - ubuntu-2204 - - rocky-8 - - rocky-9 - - rhel-7 - - rhel-8 - - rhel-9 - - almalinux-8 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - diskSizeGb: 350 - machineType: custom-16-32768 - - label: "{{matrix.image}} / archives / packaging-tests-unix" - key: "packaging-tests-unix-archives" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.archives + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix" + key: "packaging-tests-unix" + command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.{{matrix.PACKAGING_TASK}} timeout_in_minutes: 300 matrix: setup: @@ -82,6 +26,11 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + PACKAGING_TASK: + - docker + - docker-cloud-ess + - packages + - archives agents: provider: gcp image: family/elasticsearch-{{matrix.image}} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 3e0a47a8f453c..bf901fef90450 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -13,10 +13,8 @@ * This class models the different Docker base images that are used to build Docker distributions of Elasticsearch. 
*/ public enum DockerBase { - DEFAULT("ubuntu:20.04", "", "apt-get"), - // "latest" here is intentional, since the image name specifies "8" - UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi", "microdnf"), + DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"), // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"), diff --git a/distribution/docker/README.md b/distribution/docker/README.md index 49facab461edc..9438b4f1e82c3 100644 --- a/distribution/docker/README.md +++ b/distribution/docker/README.md @@ -3,8 +3,7 @@ The ES build can generate several types of Docker image. These are enumerated in the [DockerBase] enum. - * Default - this is what most people use, and is based on Ubuntu - * UBI - the same as the default image, but based upon [RedHat's UBI + * Default - this is what most people use, and is based on [RedHat's UBI images][ubi], specifically their minimal flavour. * Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev) * Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins @@ -23,14 +22,7 @@ the [DockerBase] enum. software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is another UBI build, this time on the regular UBI image, with extra hardening. See below for more details. - * Cloud - this is mostly the same as the default image, with some notable differences: - * `filebeat` and `metricbeat` are included - * `wget` is included - * The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is - `/app/elasticsearch.sh`. In normal use this file would be bind-mounted - in, but the image ships a stub version of this file so that the image - can still be tested. -The long-term goal is for both Cloud images to be retired in favour of the +The long-term goal is for Cloud ESS image to be retired in favour of the default image. diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index d73f9c395f15c..f5b94fb9dfd94 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -527,9 +527,7 @@ subprojects { Project subProject -> final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64 DockerBase base = DockerBase.DEFAULT - if (subProject.name.contains('ubi-')) { - base = DockerBase.UBI - } else if (subProject.name.contains('ironbank-')) { + if (subProject.name.contains('ironbank-')) { base = DockerBase.IRON_BANK } else if (subProject.name.contains('cloud-ess-')) { base = DockerBase.CLOUD_ESS @@ -538,11 +536,11 @@ subprojects { Project subProject -> } final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' - final String extension = base == DockerBase.UBI ? 'ubi.tar' : + final String extension = (base == DockerBase.IRON_BANK ? 'ironbank.tar' : - (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : - (base == DockerBase.WOLFI ? 'wolfi.tar' : - 'docker.tar'))) + (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : + (base == DockerBase.WOLFI ? 
'wolfi.tar' : + 'docker.tar'))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index fd2516f2fdc9a..6cb030565d9d2 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -41,9 +41,7 @@ RUN chmod 0555 /bin/tini <% } else { %> # Install required packages to extract the Elasticsearch distribution -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -RUN <%= retry.loop(package_manager, "${package_manager} update && DEBIAN_FRONTEND=noninteractive ${package_manager} install -y curl ") %> -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %> <% } else { %> RUN <%= retry.loop(package_manager, "${package_manager} install -y findutils tar gzip") %> @@ -117,27 +115,6 @@ RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' bin/elas chmod 0775 bin config config/jvm.options.d data logs plugins && \\ find config -type f -exec chmod 0664 {} + -<% if (docker_base == "cloud") { %> -COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ -RUN set -eux ; \\ - for beat in filebeat metricbeat ; do \\ - if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\ - echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ - echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - mkdir -p /opt/\$beat ; \\ - tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ - done - -# Add plugins infrastructure -RUN mkdir -p /opt/plugins/archive -RUN chmod -R 0555 /opt/plugins -<% } %> - ################################################################################ # Build stage 2 (the actual Elasticsearch image): # @@ -173,21 +150,6 @@ SHELL ["/bin/bash", "-c"] # Optionally set Bash as the default shell in the container at runtime CMD ["/bin/bash"] -<% } else if (docker_base == "default" || docker_base == "cloud") { %> - -# Change default shell to bash, then install required packages with retries. -RUN yes no | dpkg-reconfigure dash && \\ - <%= retry.loop( - package_manager, - "export DEBIAN_FRONTEND=noninteractive && \n" + - " ${package_manager} update && \n" + - " ${package_manager} upgrade -y && \n" + - " ${package_manager} install -y --no-install-recommends \n" + - " ca-certificates curl netcat p11-kit unzip zip ${docker_base == 'cloud' ? 
'wget' : '' } && \n" + - " ${package_manager} clean && \n" + - " rm -rf /var/lib/apt/lists/*" - ) %> - <% } else { %> RUN <%= retry.loop( @@ -201,12 +163,7 @@ RUN <%= retry.loop( <% } %> -<% if (docker_base == "default" || docker_base == "cloud") { %> -RUN groupadd -g 1000 elasticsearch && \\ - adduser --uid 1000 --gid 1000 --home /usr/share/elasticsearch elasticsearch && \\ - adduser elasticsearch root && \\ - chown -R 0:0 /usr/share/elasticsearch -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN groupadd -g 1000 elasticsearch && \ adduser -G elasticsearch -u 1000 elasticsearch -D --home /usr/share/elasticsearch elasticsearch && \ adduser elasticsearch root && \ @@ -226,10 +183,6 @@ COPY --from=builder --chown=0:0 /usr/share/elasticsearch /usr/share/elasticsearc COPY --from=builder --chown=0:0 /bin/tini /bin/tini <% } %> -<% if (docker_base == 'cloud') { %> -COPY --from=builder --chown=0:0 /opt /opt -<% } %> - ENV PATH /usr/share/elasticsearch/bin:\$PATH ENV SHELL /bin/bash COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh @@ -251,12 +204,7 @@ RUN chmod g=u /etc/passwd && \\ chmod 0775 /usr/share/elasticsearch && \\ chown elasticsearch bin config config/jvm.options.d data logs plugins -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -# Update "cacerts" bundle to use Ubuntu's CA certificates (and make sure it -# stays up-to-date with changes to Ubuntu's store) -COPY bin/docker-openjdk /etc/ca-certificates/update.d/docker-openjdk -RUN /etc/ca-certificates/update.d/docker-openjdk -<% } else if (docker_base == 'wolfi') { %> +<% if (docker_base == 'wolfi') { %> RUN ln -sf /etc/ssl/certs/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts <% } else { %> RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts @@ -284,9 +232,7 @@ LABEL org.label-schema.build-date="${build_date}" \\ org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\ org.opencontainers.image.vendor="Elastic" \\ org.opencontainers.image.version="${version}" -<% } %> -<% if (docker_base == 'ubi') { %> LABEL name="Elasticsearch" \\ maintainer="infra@elastic.co" \\ vendor="Elastic" \\ @@ -296,21 +242,12 @@ LABEL name="Elasticsearch" \\ description="You know, for search." <% } %> -<% if (docker_base == 'ubi') { %> -RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE -<% } else if (docker_base == 'iron_bank') { %> RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE +<% if (docker_base == 'iron_bank') { %> COPY LICENSE /licenses/LICENSE.addendum <% } %> -<% if (docker_base == "cloud") { %> -ENTRYPOINT ["/bin/tini", "--"] -CMD ["/app/elasticsearch.sh"] -# Generate a stub command that will be overwritten at runtime -RUN mkdir /app && \\ - echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\ - chmod 0555 /app/elasticsearch.sh -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> # Our actual entrypoint is `tini`, a minimal but functional init program. It # calls the entrypoint we provide, while correctly forwarding signals. 
ENTRYPOINT ["/sbin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] diff --git a/distribution/docker/ubi-docker-aarch64-export/build.gradle b/distribution/docker/ubi-docker-aarch64-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-aarch64-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/docker/ubi-docker-export/build.gradle b/distribution/docker/ubi-docker-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 8cb8354eb5d71..3ad4c247a8b9b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -96,11 +96,10 @@ /** * This class tests the Elasticsearch Docker images. We have several: *
<ul>
- * <li>The default image with a custom, small base image</li>
- * <li>A UBI-based image</li>
+ * <li>The default image UBI-based image</li>
 * <li>Another UBI image for Iron Bank</li>
 * <li>A WOLFI-based image</li>
- * <li>Images for Cloud</li>
+ * <li>Image for Cloud</li>
 * </ul>
    */ @ThreadLeakFilters(defaultFilters = true, filters = { HttpClientThreadsFilter.class }) @@ -383,15 +382,14 @@ public void test026InstallBundledRepositoryPluginsViaConfigFile() { public void test040JavaUsesTheOsProvidedKeystore() { final String path = sh.run("realpath jdk/lib/security/cacerts").stdout(); - if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { + if (distribution.packaging == Packaging.DOCKER || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { - // Whereas on other images, it's a real file so the real path is the same - assertThat(path, equalTo("/usr/share/elasticsearch/jdk/lib/security/cacerts")); + fail("Unknown distribution: " + distribution.packaging); } } @@ -1126,25 +1124,25 @@ public void test171AdditionalCliOptionsAreForwarded() throws Exception { } /** - * Check that the UBI images has the correct license information in the correct place. + * Check that the Docker images have the correct license information in the correct place. */ - public void test200UbiImagesHaveLicenseDirectory() { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test200ImagesHaveLicenseDirectory() { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final String[] files = sh.run("find /licenses -type f").stdout().split("\n"); assertThat(files, arrayContaining("/licenses/LICENSE")); // UBI image doesn't contain `diff` - final String ubiLicense = sh.run("cat /licenses/LICENSE").stdout(); + final String imageLicense = sh.run("cat /licenses/LICENSE").stdout(); final String distroLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout(); - assertThat(ubiLicense, equalTo(distroLicense)); + assertThat(imageLicense, equalTo(distroLicense)); } /** - * Check that the UBI image has the expected labels + * Check that the images has the expected labels */ - public void test210UbiLabels() throws Exception { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test210Labels() throws Exception { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final Map labels = getImageLabels(distribution); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 02e1ce35764cf..a47dd0e57642e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); default -> throw new IllegalStateException("Unknown 
Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index b4a00ca56924a..a157cc84e624e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -333,7 +333,6 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case RPM: return Packages.runElasticsearchStartCommand(sh); case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -355,7 +354,6 @@ public void stopElasticsearch() throws Exception { Packages.stopElasticsearch(sh); break; case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -371,7 +369,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 11b8324384631..55c59db6219d3 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -29,8 +29,6 @@ public Distribution(Path path) { this.packaging = Packaging.TAR; } else if (filename.endsWith(".docker.tar")) { this.packaging = Packaging.DOCKER; - } else if (filename.endsWith(".ubi.tar")) { - this.packaging = Packaging.DOCKER_UBI; } else if (filename.endsWith(".ironbank.tar")) { this.packaging = Packaging.DOCKER_IRON_BANK; } else if (filename.endsWith(".cloud-ess.tar")) { @@ -61,7 +59,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -73,7 +71,6 @@ public enum Packaging { DEB(".deb", Platforms.isDPKG()), RPM(".rpm", Platforms.isRPM()), DOCKER(".docker.tar", Platforms.isDocker()), - DOCKER_UBI(".ubi.tar", Platforms.isDocker()), DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index 
e3eac23d3ecce..5dc47993072a8 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -163,7 +163,6 @@ String build() { public static String getImageName(Distribution distribution) { String suffix = switch (distribution.packaging) { case DOCKER -> ""; - case DOCKER_UBI -> "-ubi"; case DOCKER_IRON_BANK -> "-ironbank"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; case DOCKER_WOLFI -> "-wolfi"; diff --git a/settings.gradle b/settings.gradle index d04d45bffc3ad..333f8272447c2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -66,8 +66,6 @@ List projects = [ 'distribution:docker:docker-export', 'distribution:docker:ironbank-docker-aarch64-export', 'distribution:docker:ironbank-docker-export', - 'distribution:docker:ubi-docker-aarch64-export', - 'distribution:docker:ubi-docker-export', 'distribution:docker:wolfi-docker-aarch64-export', 'distribution:docker:wolfi-docker-export', 'distribution:packages:aarch64-deb', From 830c5048bae2a39242391c930a031ff00d0cce5d Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Fri, 22 Nov 2024 08:57:00 -0500 Subject: [PATCH 173/386] Always Emit Inference ID in Semantic Text Mapping (#117294) --- docs/changelog/117294.yaml | 5 ++++ .../xpack/inference/InferenceFeatures.java | 3 ++- .../mapper/SemanticTextFieldMapper.java | 5 +++- .../mapper/SemanticTextFieldMapperTests.java | 16 ++++++++++-- .../10_semantic_text_field_mapping.yml | 26 +++++++++++++++++++ 5 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/117294.yaml diff --git a/docs/changelog/117294.yaml b/docs/changelog/117294.yaml new file mode 100644 index 0000000000000..f6e80690de7ff --- /dev/null +++ b/docs/changelog/117294.yaml @@ -0,0 +1,5 @@ +pr: 117294 +summary: Always Emit Inference ID in Semantic Text Mapping +area: Mapping +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index f70e7f367127d..c82f287792a7c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -39,7 +39,8 @@ public Set getTestFeatures() { SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 89a54ffe29177..3744bf2a6dbed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -93,6 +93,9 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); public static final NodeFeature 
SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); public static final NodeFeature SEMANTIC_TEXT_ZERO_SIZE_FIX = new NodeFeature("semantic_text.zero_size_fix"); + public static final NodeFeature SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX = new NodeFeature( + "semantic_text.always_emit_inference_id_fix" + ); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -119,7 +122,7 @@ public static class Builder extends FieldMapper.Builder { "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" ); } - }); + }).alwaysSerialize(); private final Parameter searchInferenceId = Parameter.stringParam( SEARCH_INFERENCE_ID_FIELD, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 6e58226f85f28..71ff9fc7d84cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -109,6 +109,12 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { return "cannot have nested fields when index is in [index.mode=time_series]"; } + @Override + protected void metaMapping(XContentBuilder b) throws IOException { + super.metaMapping(b); + b.field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID); + } + @Override protected Object getSampleValueForDocument() { return null; @@ -166,10 +172,11 @@ protected void assertSearchable(MappedFieldType fieldType) { public void testDefaults() throws Exception { final String fieldName = "field"; final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping); + final XContentBuilder expectedMapping = fieldMapping(this::metaMapping); MapperService mapperService = createMapperService(fieldMapping); DocumentMapper mapper = mapperService.documentMapper(); - assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); + assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); @@ -208,10 +215,15 @@ public void testSetInferenceEndpoints() throws IOException { final XContentBuilder fieldMapping = fieldMapping( b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) ); + final XContentBuilder expectedMapping = fieldMapping( + b -> b.field("type", "semantic_text") + .field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID) + .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); final MapperService mapperService = createMapperService(fieldMapping); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); - assertSerialization.accept(fieldMapping, mapperService); + assertSerialization.accept(expectedMapping, mapperService); } { final XContentBuilder fieldMapping = fieldMapping( diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml 
b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index 71fb1fd95989f..882f1df03e926 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -371,3 +371,29 @@ setup: - match: { error.type: illegal_argument_exception } - match: { error.reason: "semantic_text field [level_1.level_2.sparse_field] cannot be in an object field with subobjects disabled" } + +--- +"Mapping always includes inference ID": + - requires: + cluster_features: "semantic_text.always_emit_inference_id_fix" + reason: always emit inference ID fix added in 8.17.0 + test_runner_features: [capabilities] + capabilities: + - method: GET + path: /_inference + capabilities: [default_elser_2] + + - do: + indices.create: + index: test-always-include-inference-id-index + body: + mappings: + properties: + semantic_field: + type: semantic_text + + - do: + indices.get_mapping: + index: test-always-include-inference-id-index + + - exists: test-always-include-inference-id-index.mappings.properties.semantic_field.inference_id From 13a51f2d425347abbf338c0abf776ff95f2db91b Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Fri, 22 Nov 2024 15:19:53 +0100 Subject: [PATCH 174/386] Distinguish `LicensedFeature` by family field (#116809) This PR fixes unintentional licensed feature overlaps for features with the same name but different family fields. --- docs/changelog/116809.yaml | 5 +++ .../license/LicensedFeature.java | 4 +- .../license/XPackLicenseStateTests.java | 45 +++++++++++++++++++ 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/116809.yaml diff --git a/docs/changelog/116809.yaml b/docs/changelog/116809.yaml new file mode 100644 index 0000000000000..61dbeb233d576 --- /dev/null +++ b/docs/changelog/116809.yaml @@ -0,0 +1,5 @@ +pr: 116809 +summary: "Distinguish `LicensedFeature` by family field" +area: License +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index 56c8e87d1c502..d86c15aa14bc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -136,11 +136,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LicensedFeature that = (LicensedFeature) o; - return Objects.equals(name, that.name); + return Objects.equals(name, that.name) && Objects.equals(family, that.family); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(name, family); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 04fe20901749b..e889d25cd7a96 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -14,6 +14,7 @@ import java.util.Arrays; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.stream.Collectors; @@ 
-228,6 +229,50 @@ public void testLastUsedMomentaryFeature() { assertThat(lastUsed.get(usage), equalTo(200L)); } + public void testLastUsedMomentaryFeatureWithSameNameDifferentFamily() { + LicensedFeature.Momentary featureFamilyA = LicensedFeature.momentary("familyA", "goldFeature", GOLD); + LicensedFeature.Momentary featureFamilyB = LicensedFeature.momentary("familyB", "goldFeature", GOLD); + + AtomicInteger currentTime = new AtomicInteger(100); // non zero start time + XPackLicenseState licenseState = new XPackLicenseState(currentTime::get); + + featureFamilyA.check(licenseState); + featureFamilyB.check(licenseState); + + Map lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + Set actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 100L) + ) + ); + + currentTime.set(200); + featureFamilyB.check(licenseState); + + lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 200L) + ) + ); + } + + private record FeatureInfoWithTimestamp(String family, String featureName, Long timestamp) {} + public void testLastUsedPersistentFeature() { LicensedFeature.Persistent goldFeature = LicensedFeature.persistent("family", "goldFeature", GOLD); AtomicInteger currentTime = new AtomicInteger(100); // non zero start time From 29be961fd14ce35ad678f206781f0b463f6d0c7d Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Fri, 22 Nov 2024 16:20:32 +0200 Subject: [PATCH 175/386] Search Queries in parallel - part 1 (#116812) Runs some spots in parallel to increase test execution performance and get some coverage on parallel query execution --- .../template/SimpleIndexTemplateIT.java | 24 +-- .../aggregations/bucket/RandomSamplerIT.java | 46 ++-- .../search/sort/FieldSortIT.java | 197 +++++------------- .../search/sort/GeoDistanceSortBuilderIT.java | 46 +--- .../search/source/MetadataFetchingIT.java | 15 +- .../search/source/SourceFetchingIT.java | 40 ++-- 6 files changed, 118 insertions(+), 250 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 0647a24aa39c8..de9e3f28a2109 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -50,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -843,24 +844,13 @@ public void testMultipleTemplate() throws IOException { ensureGreen(); - // ax -> matches template - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1); + assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); + assertNull(response.getHits().getAt(0).field("field2")); + }, prepareSearch("ax").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } - ); - - // bx -> matches template - assertResponse( - prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } + prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2") ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index 544f0a08eaa6c..0aa28b9f9dbe8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.sampler.random.InternalRandomSampler; import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder; @@ -20,11 +21,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.lessThan; @@ -112,27 +115,28 @@ public void testRandomSamplerConsistentSeed() { } ); - for (int i = 0; i < NUM_SAMPLE_RUNS; i++) { - assertResponse( - prepareSearch("idx").setPreference("shard:0") - .addAggregation( - new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) - .setSeed(0) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - .setShardSeed(42) - ), - response -> { - InternalRandomSampler sampler = response.getAggregations().get("sampler"); - double monotonicValue = ((Avg) 
sampler.getAggregations().get("mean_monotonic")).getValue(); - double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); - long docCount = sampler.getDocCount(); - assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); - assertEquals(numericValue, sampleNumericValue[0], tolerance); - assertEquals(docCount, sampledDocCount[0]); - } - ); - } + assertResponses(response -> { + InternalRandomSampler sampler = response.getAggregations().get("sampler"); + double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); + double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); + long docCount = sampler.getDocCount(); + assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); + assertEquals(numericValue, sampleNumericValue[0], tolerance); + assertEquals(docCount, sampledDocCount[0]); + }, + IntStream.rangeClosed(0, NUM_SAMPLE_RUNS - 1) + .mapToObj( + num -> prepareSearch("idx").setPreference("shard:0") + .addAggregation( + new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) + .setSeed(0) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + .setShardSeed(42) + ) + ) + .toArray(SearchRequestBuilder[]::new) + ); } public void testRandomSampler() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index d1841ebaf8071..87665c3d784f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -54,6 +54,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; import java.util.function.Function; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; @@ -66,6 +67,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -323,6 +325,12 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut } public void test3078() { + Consumer assertConsumer = response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + }; + assertAcked(indicesAdmin().prepareCreate("test").setMapping("field", "type=keyword").get()); ensureGreen(); @@ -332,11 +340,7 @@ public void test3078() { refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), 
equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex and refresh prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); @@ -344,22 +348,14 @@ public void test3078() { assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex - no refresh prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // force merge forceMerge(); @@ -368,20 +364,12 @@ public void test3078() { prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); } @@ -395,39 +383,19 @@ public void testScoreSortDirection() throws Exception { refresh(); - assertResponse( + assertResponses(response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + }, prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) ), - response -> { - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( - prepareSearch("test").setQuery( - 
QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - } + ).addSort("_score", SortOrder.DESC) ); } @@ -878,30 +846,20 @@ public void testSortMissingStrings() throws IOException { throw new RuntimeException(); } - logger.info("--> sort with no missing (same as missing _last)"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + assertResponses(response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + }, + // "--> sort with no missing (same as missing _last)" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), + // "--> sort with missing _last" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")) ); - logger.info("--> sort with missing _last"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } - ); logger.info("--> sort with missing _first"); assertResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")), @@ -1263,59 +1221,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L)); } ); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1327,8 +1233,12 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) 
response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC) + ); + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1340,7 +1250,11 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC) + ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1478,8 +1392,7 @@ public void testSortOnRareField() throws IOException { } refresh(); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - + Consumer assertResponse = response -> { assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1490,27 +1403,17 @@ public void testSortOnRareField() throws IOException { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + }; + + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); + for (int i = 0; i < 15; i++) { prepareIndex("test").setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); refresh(); } - - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20")); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10")); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); } public void testSortMetaField() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 1383f33a41d84..aabca1b9333f8 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -34,6 +34,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; @@ -292,49 +293,22 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup String hashPoint = "s037ms06g7h0"; - GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( + assertResponses( + response -> checkCorrectSortOrderForGeoSort(response), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2).sortMode(SortMode.MIN).order(SortOrder.ASC)), prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0"))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource( new SearchSourceBuilder().sort( SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE) ) - ), - response -> checkCorrectSortOrderForGeoSort(response) + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index ec9c680e17fc3..9d53eb03eb04e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -22,12 +22,14 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class MetadataFetchingIT extends ESIntegTestCase { + public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); @@ -35,17 +37,14 @@ public void testSimple() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); - }); - - assertResponse(prepareSearch("test").storedFields("_none_"), response -> { - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - }); + }, + prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), + prepareSearch("test").storedFields("_none_") + ); } public void testInnerHits() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 616fc2e1f3483..0e7f8b604a8df 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -12,11 +12,13 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsEqual.equalTo; public class SourceFetchingIT extends ESIntegTestCase { + public void testSourceDefaultBehavior() { createIndex("test"); ensureGreen(); @@ -24,18 +26,16 @@ public void testSourceDefaultBehavior() { indexDoc("test", "1", "field", "value"); refresh(); - assertResponse(prepareSearch("test"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue())); + assertResponses( + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()), + prepareSearch("test"), + prepareSearch("test").addStoredField("_source") + ); assertResponse( prepareSearch("test").addStoredField("bla"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()) ); - - assertResponse( - prepareSearch("test").addStoredField("_source"), - response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) - ); - } public void testSourceFiltering() { @@ -55,20 +55,20 @@ public void testSourceFiltering() { response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) ); - 
assertResponse(prepareSearch("test").setFetchSource("field1", null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource("field1", null), + prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }) + ); + assertResponse(prepareSearch("test").setFetchSource("hello", null), response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + } /** @@ -82,15 +82,13 @@ public void testSourceWithWildcardFiltering() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), + prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null) + ); } } From bff8ce65c9f06f1b5c47e33f25a47a79fa2897aa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 23 Nov 2024 01:39:21 +1100 Subject: [PATCH 176/386] Mute org.elasticsearch.xpack.inference.DefaultEndPointsIT testInferDeploysDefaultElser #114913 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8be390e670c9b..fcc3b6b6e9b12 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -244,6 +244,9 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testRetryPointInTime issue: https://github.com/elastic/elasticsearch/issues/117116 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultElser + issue: https://github.com/elastic/elasticsearch/issues/114913 # Examples: # From f6ac6e1c3b5fde2137b2e8ef16fb0f93848d2bd1 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 22 Nov 2024 16:30:57 +0100 Subject: [PATCH 177/386] [Build] Remove deprecated BuildParams (#116984) --- .../ElasticsearchJavaPluginFuncTest.groovy | 3 - .../src/main/groovy/elasticsearch.fips.gradle | 2 - .../src/main/groovy/elasticsearch.ide.gradle | 1 - 
.../groovy/elasticsearch.stable-api.gradle | 1 - .../gradle/internal/info/BuildParams.java | 80 ------------------- .../internal/info/GlobalBuildInfoPlugin.java | 7 -- distribution/packages/build.gradle | 1 - libs/plugin-analysis-api/build.gradle | 2 - libs/simdvec/build.gradle | 1 - modules/aggregations/build.gradle | 2 - modules/data-streams/build.gradle | 1 - modules/ingest-attachment/build.gradle | 2 - .../qa/full-cluster-restart/build.gradle | 1 - modules/legacy-geo/build.gradle | 2 - modules/mapper-extras/build.gradle | 2 - modules/reindex/build.gradle | 1 - modules/repository-azure/build.gradle | 10 +-- modules/repository-gcs/build.gradle | 1 - modules/repository-s3/build.gradle | 7 +- modules/rest-root/build.gradle | 2 - plugins/analysis-icu/build.gradle | 2 - plugins/discovery-azure-classic/build.gradle | 6 +- plugins/discovery-ec2/build.gradle | 2 - plugins/discovery-gce/qa/gce/build.gradle | 2 - plugins/mapper-annotated-text/build.gradle | 2 - plugins/mapper-murmur3/build.gradle | 2 - plugins/repository-hdfs/build.gradle | 1 - .../build.gradle | 1 - qa/multi-cluster-search/build.gradle | 1 - qa/repository-multi-version/build.gradle | 1 - qa/rolling-upgrade/build.gradle | 1 - qa/verify-version-constants/build.gradle | 1 - server/build.gradle | 2 - .../apm-integration/build.gradle | 10 ++- test/external-modules/build.gradle | 10 ++- .../delayed-aggs/build.gradle | 1 - .../die-with-dignity/build.gradle | 10 ++- .../external-modules/error-query/build.gradle | 1 - .../esql-heap-attack/build.gradle | 1 - test/external-modules/jvm-crash/build.gradle | 1 - test/framework/build.gradle | 1 - test/immutable-collections-patch/build.gradle | 1 - x-pack/plugin/analytics/build.gradle | 7 +- .../plugin/async-search/qa/rest/build.gradle | 7 +- .../plugin/autoscaling/qa/rest/build.gradle | 7 +- x-pack/plugin/build.gradle | 8 +- x-pack/plugin/ccr/qa/build.gradle | 7 +- .../downgrade-to-basic-license/build.gradle | 7 ++ .../plugin/ccr/qa/multi-cluster/build.gradle | 7 ++ x-pack/plugin/core/build.gradle | 9 ++- .../qa/early-deprecation-rest/build.gradle | 8 +- .../plugin/deprecation/qa/rest/build.gradle | 8 +- .../downsample/qa/mixed-cluster/build.gradle | 1 - x-pack/plugin/downsample/qa/rest/build.gradle | 2 - .../downsample/qa/with-security/build.gradle | 1 - .../rest-with-advanced-security/build.gradle | 9 ++- .../enrich/qa/rest-with-security/build.gradle | 9 ++- x-pack/plugin/enrich/qa/rest/build.gradle | 9 ++- .../qa/full-cluster-restart/build.gradle | 1 - x-pack/plugin/eql/build.gradle | 8 +- .../eql/qa/ccs-rolling-upgrade/build.gradle | 2 - x-pack/plugin/eql/qa/correctness/build.gradle | 8 +- x-pack/plugin/eql/qa/mixed-node/build.gradle | 8 +- x-pack/plugin/eql/qa/rest/build.gradle | 9 ++- x-pack/plugin/eql/qa/security/build.gradle | 9 ++- x-pack/plugin/esql/build.gradle | 8 +- .../esql/qa/server/mixed-cluster/build.gradle | 7 +- .../qa/server/multi-clusters/build.gradle | 1 - x-pack/plugin/identity-provider/build.gradle | 8 +- .../qa/idp-rest-tests/build.gradle | 8 +- .../plugin/ilm/qa/multi-cluster/build.gradle | 8 +- x-pack/plugin/ilm/qa/multi-node/build.gradle | 8 +- x-pack/plugin/inference/build.gradle | 1 - .../inference/qa/mixed-cluster/build.gradle | 8 +- .../inference/qa/rolling-upgrade/build.gradle | 1 - x-pack/plugin/kql/build.gradle | 7 ++ x-pack/plugin/logsdb/build.gradle | 2 - .../plugin/logsdb/qa/with-basic/build.gradle | 2 - .../mapper-aggregate-metric/build.gradle | 2 - .../mapper-constant-keyword/build.gradle | 7 +- .../plugin/mapper-unsigned-long/build.gradle | 5 +- 
x-pack/plugin/mapper-version/build.gradle | 8 +- x-pack/plugin/ml/build.gradle | 7 +- .../ml/qa/basic-multi-node/build.gradle | 7 +- x-pack/plugin/ml/qa/disabled/build.gradle | 7 +- .../build.gradle | 8 +- .../ml/qa/single-node-tests/build.gradle | 7 +- .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 2 - .../qa/s3/build.gradle | 8 +- .../qa/azure/build.gradle | 7 +- .../searchable-snapshots/qa/gcs/build.gradle | 7 +- .../searchable-snapshots/qa/hdfs/build.gradle | 2 - .../searchable-snapshots/qa/s3/build.gradle | 8 +- x-pack/plugin/security/cli/build.gradle | 8 +- .../qa/basic-enable-security/build.gradle | 2 - .../plugin/security/qa/jwt-realm/build.gradle | 8 +- .../security/qa/multi-cluster/build.gradle | 1 - .../plugin/security/qa/profile/build.gradle | 7 +- .../security/qa/security-basic/build.gradle | 8 +- .../qa/security-disabled/build.gradle | 8 +- .../plugin/security/qa/tls-basic/build.gradle | 9 ++- .../qa/full-cluster-restart/build.gradle | 8 +- .../shutdown/qa/rolling-upgrade/build.gradle | 2 +- x-pack/plugin/slm/build.gradle | 7 +- x-pack/plugin/slm/qa/multi-node/build.gradle | 8 +- .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 7 +- .../qa/s3/build.gradle | 2 - .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 2 - .../qa/hdfs/build.gradle | 2 - .../snapshot-repo-test-kit/qa/s3/build.gradle | 2 - x-pack/plugin/spatial/build.gradle | 7 +- x-pack/plugin/sql/build.gradle | 9 ++- x-pack/plugin/sql/qa/jdbc/build.gradle | 8 +- .../qa/jdbc/security/with-ssl/build.gradle | 7 +- x-pack/plugin/sql/qa/mixed-node/build.gradle | 12 ++- .../qa/server/security/with-ssl/build.gradle | 7 +- x-pack/plugin/sql/sql-cli/build.gradle | 7 +- .../build.gradle | 8 +- x-pack/plugin/watcher/qa/rest/build.gradle | 7 +- x-pack/plugin/wildcard/build.gradle | 7 +- .../build.gradle | 8 +- x-pack/qa/full-cluster-restart/build.gradle | 8 +- x-pack/qa/mixed-tier-cluster/build.gradle | 12 ++- .../legacy-with-basic-license/build.gradle | 8 +- .../legacy-with-full-license/build.gradle | 8 +- .../legacy-with-restricted-trust/build.gradle | 8 +- x-pack/qa/oidc-op-tests/build.gradle | 7 +- x-pack/qa/rolling-upgrade-basic/build.gradle | 8 +- .../build.gradle | 8 +- x-pack/qa/rolling-upgrade/build.gradle | 8 +- x-pack/qa/smoke-test-plugins-ssl/build.gradle | 9 ++- x-pack/qa/smoke-test-plugins/build.gradle | 8 +- x-pack/qa/third-party/jira/build.gradle | 9 ++- x-pack/qa/third-party/pagerduty/build.gradle | 7 +- x-pack/qa/third-party/slack/build.gradle | 7 +- 138 files changed, 522 insertions(+), 278 deletions(-) delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy index 9fc6aa7276b2d..36a43c4b739b6 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy @@ -20,9 +20,6 @@ class ElasticsearchJavaPluginFuncTest extends AbstractGradleInternalPluginFuncTe when: buildFile.text << """ import org.elasticsearch.gradle.Architecture - import org.elasticsearch.gradle.internal.info.BuildParams - BuildParams.init { it.setMinimumRuntimeVersion(JavaVersion.VERSION_1_10) } - assert 
tasks.named('compileJava').get().sourceCompatibility == JavaVersion.VERSION_1_10.toString() assert tasks.named('compileJava').get().targetCompatibility == JavaVersion.VERSION_1_10.toString() """ diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 3c9cf121813c9..14e2323b4d14d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -9,11 +9,9 @@ import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.rest.RestTestBasePlugin import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClustersAware -import org.elasticsearch.gradle.testclusters.TestDistribution //apply plugin: org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 431d51d6c1275..9237c3ae8918c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.util.Pair import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.TestUtil import org.jetbrains.gradle.ext.JUnit diff --git a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle index 27b490329f8cb..3f506ae954df8 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle @@ -3,7 +3,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.internal.JarApiComparisonTask -import org.elasticsearch.gradle.internal.info.BuildParams import static org.elasticsearch.gradle.internal.InternalDistributionBwcSetupPlugin.buildBwcTaskName diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java deleted file mode 100644 index ea8aeda8fc099..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.gradle.internal.info; - -import java.lang.reflect.Modifier; -import java.util.Arrays; -import java.util.function.Consumer; - -@Deprecated -public class BuildParams { - private static Boolean isCi; - - /** - * Initialize global build parameters. 
This method accepts and a initialization function which in turn accepts a - * {@link MutableBuildParams}. Initialization can be done in "stages", therefore changes override existing values, and values from - * previous calls to {@link #init(Consumer)} carry forward. In cases where you want to clear existing values - * {@link MutableBuildParams#reset()} may be used. - * - * @param initializer Build parameter initializer - */ - public static void init(Consumer<MutableBuildParams> initializer) { - initializer.accept(MutableBuildParams.INSTANCE); - } - - public static Boolean isCi() { - return value(isCi); - } - - private static <T> T value(T object) { - if (object == null) { - String callingMethod = Thread.currentThread().getStackTrace()[2].getMethodName(); - - throw new IllegalStateException( - "Build parameter '" - + propertyName(callingMethod) - + "' has not been initialized.\n" - + "Perhaps the plugin responsible for initializing this property has not been applied." - ); - } - - return object; - } - - private static String propertyName(String methodName) { - String propertyName = methodName.startsWith("is") ? methodName.substring("is".length()) : methodName.substring("get".length()); - return propertyName.substring(0, 1).toLowerCase() + propertyName.substring(1); - } - - public static class MutableBuildParams { - private static MutableBuildParams INSTANCE = new MutableBuildParams(); - - private MutableBuildParams() {} - - /** - * Resets any existing values from previous initializations. - */ - public void reset() { - Arrays.stream(BuildParams.class.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> { - try { - // Since we are mutating private static fields from a public static inner class we need to suppress - // accessibility controls here.
- f.setAccessible(true); - f.set(null, null); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - }); - } - - public void setIsCi(boolean isCi) { - BuildParams.isCi = isCi; - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 761b0601a1c24..0535026b2594e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -152,13 +152,6 @@ public void apply(Project project) { spec.getParameters().getBuildParams().set(buildParams); }); - BuildParams.init(params -> { - params.reset(); - params.setIsCi( - System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null - ); - }); - // Enforce the minimum compiler version assertMinimumCompilerVersion(minimumCompilerVersion); diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 7d60137ac86b1..486c95d15c7a1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -10,7 +10,6 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams import org.redline_rpm.header.Flags import java.nio.file.Files diff --git a/libs/plugin-analysis-api/build.gradle b/libs/plugin-analysis-api/build.gradle index 3f1670d76a0c1..41fbbdbafe998 100644 --- a/libs/plugin-analysis-api/build.gradle +++ b/libs/plugin-analysis-api/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index ffc50ecb1f6ff..95b8ddf28cf2f 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.publish' diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 2835180904620..94fdddf6d711a 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index b6fc1e3722ccd..b017ae9921b0e 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.test-with-dependencies' diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle index 821de8f834a44..8fe2b82fe21fb 100644 --- a/modules/ingest-attachment/build.gradle +++ b/modules/ingest-attachment/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index 8e7d20108a869..29cc6d7184bf2 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index 1b4fd9d52bbaf..55171221396a3 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index a7bdc11e15550..eda55fe6de9da 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index bb1500ba55664..b4a1c9cd6248d 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.transform.UnzipTransform diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 4babac68f1e71..8c1ca3891bc1e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -1,8 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin -import org.elasticsearch.gradle.internal.test.RestIntegTestTask - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License @@ -11,6 +6,11 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ + +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin +import org.elasticsearch.gradle.internal.test.RestIntegTestTask + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index 605d886a71056..811645d154c7a 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -9,7 +9,6 @@ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin import java.nio.file.Files diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index c1cd1a13719a7..1301d17606d63 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -1,7 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -10,6 +6,9 @@ import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/rest-root/build.gradle b/modules/rest-root/build.gradle index 05a545a1ed671..adb8aeb02863f 100644 --- a/modules/rest-root/build.gradle +++ b/modules/rest-root/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index f9245ed32c325..05cd2cb44124c 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 16786c6c31074..3ec2ec531ae92 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License @@ -9,6 +6,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ + +import org.elasticsearch.gradle.LoggedExec + apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index f281db5279660..980e2467206d7 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index a22678b9a67dc..72cb429b49072 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -7,9 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ - import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index 545dfe49bfcf3..ff7230701aa0a 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index e5108814154a3..15d7f6249695b 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index b7f7816a3a0e1..6c2dc56b17eb2 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' diff --git a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle index ce5b840e6dc91..e63b1629db39c 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle +++ b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 906a49134bb51..d46bf3f18f8cc 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -15,7 +15,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/repository-multi-version/build.gradle b/qa/repository-multi-version/build.gradle index 79a8be4c1be24..646a7974868c4 100644 --- a/qa/repository-multi-version/build.gradle +++ b/qa/repository-multi-version/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 2f717f201f248..1d7475427b33b 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index ee29da53dc51b..67fc962e087cb 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/server/build.gradle b/server/build.gradle index bc8decfa8babc..0bd807751ecbb 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index 91e01d363749c..7f64b33b81423 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -1,4 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index 1b1e61a69e190..3ba6b309071f3 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -1,5 +1,11 @@ - -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ subprojects { apply plugin: 'elasticsearch.base-internal-es-plugin' diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index f57bd37d65171..fae5e93b37fc3 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 1f98e43052589..6c1da40406a5d 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -1,4 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index ff4783552ebf5..3c72145c11f8b 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' tasks.named('yamlRestTest').configure { diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index 3d6291f6d011a..ba85e0dbd8693 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' // Necessary to use tests in Serverless diff --git a/test/external-modules/jvm-crash/build.gradle b/test/external-modules/jvm-crash/build.gradle index 73ad8b851a220..0b06142e81939 100644 --- a/test/external-modules/jvm-crash/build.gradle +++ b/test/external-modules/jvm-crash/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' // Necessary to use tests in Serverless diff --git a/test/framework/build.gradle b/test/framework/build.gradle index c61a3b1851ea9..126b95041da11 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams; apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle index 381c0cd6dd044..85a199af2d477 100644 --- a/test/immutable-collections-patch/build.gradle +++ b/test/immutable-collections-patch/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.java' diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index ddc075cc9adcc..00f28b4badc3d 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle index c950646930779..eb758c2c0ef5e 100644 --- a/x-pack/plugin/async-search/qa/rest/build.gradle +++ b/x-pack/plugin/async-search/qa/rest/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle index c79644ee31225..903e76fd986cf 100644 --- a/x-pack/plugin/autoscaling/qa/rest/build.gradle +++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 48b1d478ddf94..26040529b04df 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -1,6 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.util.GradleUtils diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle index 4be504e616920..d5bc38d2e8dd5 100644 --- a/x-pack/plugin/ccr/qa/build.gradle +++ b/x-pack/plugin/ccr/qa/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.java' diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index ac8ce1b0fd331..86f974ed13359 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClusterValueSource import org.elasticsearch.gradle.testclusters.TestClustersPlugin diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index 86abbbbeedf6b..61678784e6b38 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClusterValueSource diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index d4c3f67bf3ebb..51d770936e64e 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -1,7 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.Version - import java.nio.file.Paths apply plugin: 'elasticsearch.internal-es-plugin' diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle index a9580f4e14d6b..7e61533c818ec 100644 --- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 9a8b228763fe0..45b543d910a75 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.base-internal-es-plugin' apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index c4f2a239d48e2..236c851febd6c 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-yaml-rest-test' diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index c5cfbea000ebe..54e07558464d1 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' diff --git a/x-pack/plugin/downsample/qa/with-security/build.gradle b/x-pack/plugin/downsample/qa/with-security/build.gradle index 849c242f372bd..29980b95d0291 100644 --- a/x-pack/plugin/downsample/qa/with-security/build.gradle +++ b/x-pack/plugin/downsample/qa/with-security/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' diff --git a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle index 2e649e718b081..6a1f820e36205 100644 --- a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle +++ b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('core')) diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle index 844cfcc61adff..17a213a6e7f0d 100644 --- a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle +++ b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('core')) diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle index 637ab21a98fd7..cf3c687004cbb 100644 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ b/x-pack/plugin/enrich/qa/rest/build.gradle @@ -1,10 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams restResources { restApi { diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle index 47a1ffaa37fa4..1e1973a118074 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle @@ -5,7 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index b0b5fefa37fcd..9ae67f0e27c2b 100644 --- a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle index cbea0896264d5..bc1a44f94d18a 100644 --- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle +++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle @@ -5,9 +5,7 @@ * 2.0. */ - import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index a791356499f5c..7ca6e8f134d20 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.internal-testclusters' import org.elasticsearch.gradle.testclusters.RunTask -import org.elasticsearch.gradle.internal.info.BuildParams dependencies { javaRestTestImplementation project(':test:framework') diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle index d3aa227c7ef88..bbeb439ab6155 100644 --- a/x-pack/plugin/eql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.bwc-test' import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask dependencies { diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index 00f196d863f2e..0ffecefb934f7 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -1,10 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-test-artifact' -import org.elasticsearch.gradle.internal.info.BuildParams - restResources { restApi { include '_common', 'bulk', 'indices', 'eql' diff --git a/x-pack/plugin/eql/qa/security/build.gradle b/x-pack/plugin/eql/qa/security/build.gradle index 1f0f949cab706..9072a9a7bad3e 100644 --- a/x-pack/plugin/eql/qa/security/build.gradle +++ b/x-pack/plugin/eql/qa/security/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.internal-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { javaRestTestImplementation project(path: xpackModule('eql:qa:common')) diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 201863108a6c8..f92c895cc5b7b 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + plugins { id 'idea' } -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider; import static org.elasticsearch.gradle.util.PlatformUtils.normalize diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 68c0e8e30f814..eac5d5764d4b2 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -1,8 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle index 2c432eb94ebf1..7f3859e2229ef 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -6,7 +6,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/identity-provider/build.gradle b/x-pack/plugin/identity-provider/build.gradle index f3b0def7eee97..f9c121da0f550 100644 --- a/x-pack/plugin/identity-provider/build.gradle +++ b/x-pack/plugin/identity-provider/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle index b109c01181729..cbdb25825623d 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle index 256225c5ef3bf..8bc2967fc63de 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.info.BuildParams import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index d420ac9effdde..4cd41e58b11ac 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 29d5add35ff49..3c19e11a450b4 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,7 +4,6 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index c05e71fa1cd55..0f8c732154e85 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -1,6 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle index bfaff7c84d9ad..214d775b46236 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle +++ b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle @@ -5,7 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 79f2c91114bd9..76a4bd5aff777 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + import static org.elasticsearch.gradle.util.PlatformUtils.normalize apply plugin: 'elasticsearch.internal-es-plugin' diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle index 60578f832d153..1aef69e0e3fac 100644 --- a/x-pack/plugin/logsdb/build.gradle +++ b/x-pack/plugin/logsdb/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' diff --git a/x-pack/plugin/logsdb/qa/with-basic/build.gradle b/x-pack/plugin/logsdb/qa/with-basic/build.gradle index 44ebd83bf4f4c..9729ac9c29cef 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/build.gradle +++ b/x-pack/plugin/logsdb/qa/with-basic/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/mapper-aggregate-metric/build.gradle b/x-pack/plugin/mapper-aggregate-metric/build.gradle index bae5acc21fc75..2a7841929b21d 100644 --- a/x-pack/plugin/mapper-aggregate-metric/build.gradle +++ b/x-pack/plugin/mapper-aggregate-metric/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index 3b11d951fe37a..4f50246450f3f 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index faad1db822560..7eff1bfe94a3a 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License @@ -8,6 +5,8 @@ import org.elasticsearch.gradle.internal.info.BuildParams * 2.0. */ +import org.elasticsearch.gradle.Version + evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' diff --git a/x-pack/plugin/mapper-version/build.gradle b/x-pack/plugin/mapper-version/build.gradle index fb760b3446dfd..a87def29620c7 100644 --- a/x-pack/plugin/mapper-version/build.gradle +++ b/x-pack/plugin/mapper-version/build.gradle @@ -1,8 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ evaluationDependsOn(xpackModule('core')) - apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 67c26c78a6741..716c401a9fcc8 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 3854c70b0f389..07dc1cc3c612a 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index 0d1d8d6484afc..9d157b3e7fa32 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index b43132c2daf50..c0d6913d85590 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index 5ed1c5179716f..02421d9bb3d14 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle index 4683c13f1fc0c..43c78bfc887b7 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle index 62fe47c08f5f5..984590f42256c 100644 --- a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle index 3c58e6a06af69..5c83e8980a474 100644 --- a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle +++ b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle index e2f77fae89225..de5ec42147d3f 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle index c0a420aff313a..0340453d0840b 100644 --- a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle index e8d97da9a9e37..b41e0f8dcc1cf 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.internal-available-ports' diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle index 430df2a7e8122..1659c592e5e64 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 8fd3dd29f87a4..d450a38dd1d29 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle index a6930d38d41e5..72deed1af72dd 100644 --- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle +++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/security/qa/jwt-realm/build.gradle b/x-pack/plugin/security/qa/jwt-realm/build.gradle index bc7178f11d9fc..1f7b7c1038fad 100644 --- a/x-pack/plugin/security/qa/jwt-realm/build.gradle +++ b/x-pack/plugin/security/qa/jwt-realm/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/multi-cluster/build.gradle b/x-pack/plugin/security/qa/multi-cluster/build.gradle index 8ee449d39dcce..5b682cfdccade 100644 --- a/x-pack/plugin/security/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/security/qa/multi-cluster/build.gradle @@ -5,7 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle index 7465ef9917258..b0a1927ab9dfe 100644 --- a/x-pack/plugin/security/qa/profile/build.gradle +++ b/x-pack/plugin/security/qa/profile/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle index 30751705bd75f..8740354646346 100644 --- a/x-pack/plugin/security/qa/security-basic/build.gradle +++ b/x-pack/plugin/security/qa/security-basic/build.gradle @@ -1,8 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' -import org.elasticsearch.gradle.internal.info.BuildParams - dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/security/qa/security-disabled/build.gradle b/x-pack/plugin/security/qa/security-disabled/build.gradle index 0a05eae479d33..6fa100f392b9a 100644 --- a/x-pack/plugin/security/qa/security-disabled/build.gradle +++ b/x-pack/plugin/security/qa/security-disabled/build.gradle @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + /* * This QA project tests the security plugin when security is explicitly disabled. * It is intended to cover security functionality which is supposed to @@ -5,7 +12,6 @@ * For example: If a cluster has a pipeline with the set_security_user processor * defined, it should be not fail */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle index e3b51bde45cc8..c0df6a4f27f58 100644 --- a/x-pack/plugin/security/qa/tls-basic/build.gradle +++ b/x-pack/plugin/security/qa/tls-basic/build.gradle @@ -1,6 +1,11 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ -import org.elasticsearch.gradle.internal.info.BuildParams +apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle index 515ffca4a59bf..60b0b372ba14c 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle index 4c98276abe154..17996ce82a453 100644 --- a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle +++ b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle @@ -6,7 +6,7 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/plugin/slm/build.gradle b/x-pack/plugin/slm/build.gradle index d9511fe67e8e0..b54e31315f709 100644 --- a/x-pack/plugin/slm/build.gradle +++ b/x-pack/plugin/slm/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle index d6b1fe8a1e219..afbae8932e292 100644 --- a/x-pack/plugin/slm/qa/multi-node/build.gradle +++ b/x-pack/plugin/slm/qa/multi-node/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle index cb2831f0cf273..7f69d6b7e56eb 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle @@ -5,8 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle index 7550ab8585e13..4d39ca95312aa 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle index e676e1f1f2162..07909bf4cdbc1 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle index af4ed719a9c2f..5f195e983d191 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ - -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle index b7e1036ab3e26..176a441279aab 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 14e2b05bc140e..81eb82a522389 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -5,8 +5,6 @@ * 2.0. */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle index 313a11f8ce431..33398d5b8064b 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle @@ -5,8 +5,6 @@ * 2.0. 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index 4304bae5b9991..6299908f0dc14 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index d1dcbc3adbd95..69468bf574956 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -1,8 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' -import org.elasticsearch.gradle.internal.info.BuildParams - esplugin { name = 'x-pack-sql' description 'The Elasticsearch plugin that powers SQL for Elasticsearch' diff --git a/x-pack/plugin/sql/qa/jdbc/build.gradle b/x-pack/plugin/sql/qa/jdbc/build.gradle index a444399ed28ce..e93d3b72f1de9 100644 --- a/x-pack/plugin/sql/qa/jdbc/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/build.gradle @@ -1,7 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask description = 'Integration tests for SQL JDBC driver' diff --git a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle index 971c7bf319244..1637cad33c76d 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.test-with-ssl' diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 06e3b61d5b303..35600fda0eb33 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -1,10 +1,16 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.bwc-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + dependencies { javaRestTestImplementation project(':x-pack:qa') javaRestTestImplementation(project(xpackModule('ql:test-fixtures'))) diff --git a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle index 51a3f83a909af..0b9c515c48be2 100644 --- a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle +++ b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.test-with-ssl' diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index cd24dcc15c863..bd8788191cfa2 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ /* * This project is named sql-cli because it is in the "org.elasticsearch.plugin" diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index eb0551a4d10e1..b4ee0bee76d9d 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle index 8382a71092720..2d5fc8349b5e0 100644 --- a/x-pack/plugin/watcher/qa/rest/build.gradle +++ b/x-pack/plugin/watcher/qa/rest/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle index b582f3fcea903..760ad407575d7 100644 --- a/x-pack/plugin/wildcard/build.gradle +++ b/x-pack/plugin/wildcard/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 8a67a2c1dde0d..65f2282014dc4 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + apply plugin: 'elasticsearch.internal-yaml-rest-test' -import org.elasticsearch.gradle.internal.info.BuildParams dependencies { testImplementation project(':x-pack:qa') diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index d6b05242f613b..ee0955c6db082 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/qa/mixed-tier-cluster/build.gradle b/x-pack/qa/mixed-tier-cluster/build.gradle index 79e7d6a655993..bee28c47dc867 100644 --- a/x-pack/qa/mixed-tier-cluster/build.gradle +++ b/x-pack/qa/mixed-tier-cluster/build.gradle @@ -1,10 +1,16 @@ -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.bwc-test' +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + dependencies { javaRestTestImplementation project(':x-pack:qa') } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle index 9c0648abca21b..83c231da7529c 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle index ca79bb7ec3825..6e95d718b19de 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle index b9f8369763476..5c6235e092458 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index b53539b224861..43d1cd12cdfb7 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 09b3b7db7c917..9a447f35eb13c 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle index 0d1cfbd5ff022..ebcb4cd9760fe 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle +++ b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 60fb55e9a2593..2049ccb5d9cc8 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index da2d095c001d4..461ebc4beb443 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,6 +1,11 @@ -import org.apache.tools.ant.filters.ReplaceTokens +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +import org.apache.tools.ant.filters.ReplaceTokens apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle index 427aa39f02e49..a51a67dd75b8a 100644 --- a/x-pack/qa/smoke-test-plugins/build.gradle +++ b/x-pack/qa/smoke-test-plugins/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
 import org.apache.tools.ant.filters.ReplaceTokens
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle
index 626693a8f295f..7e3d0485545a6 100644
--- a/x-pack/qa/third-party/jira/build.gradle
+++ b/x-pack/qa/third-party/jira/build.gradle
@@ -1,10 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import groovy.json.JsonSlurper

 import javax.net.ssl.HttpsURLConnection
 import java.nio.charset.StandardCharsets

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

 dependencies {
diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle
index 86ed67ccbb2d6..20f7b9b654b66 100644
--- a/x-pack/qa/third-party/pagerduty/build.gradle
+++ b/x-pack/qa/third-party/pagerduty/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle
index ff501a7c99c9b..54821a9d2b71a 100644
--- a/x-pack/qa/third-party/slack/build.gradle
+++ b/x-pack/qa/third-party/slack/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

From b45564364b42cd32a5817d3cfb2597993d087e9e Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Fri, 22 Nov 2024 07:40:06 -0800
Subject: [PATCH 178/386] Add java version variants of entitlements checker
 (#116878)

As each version of Java is released, there may be additional methods we
want to instrument for entitlements. Since new methods won't exist in
the base version of Java that Elasticsearch is compiled with, we need
to have different classes and compilation for each version.

This commit adds scaffolding for adding the classes for new versions
of Java. Unfortunately it requires several classes in different
locations. But hopefully these are infrequent enough that the
boilerplate is ok. We could consider adding a helper Gradle task to
templatize the new classes in the future if it is too cumbersome.

Note that the example for Java23 does not have anything meaningful in
it yet; it's only meant as an example until we go through classes and
methods that were added after Java 21.
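
As a rough sketch of the pattern (names here are hypothetical -- no
Java24 classes exist in this change), a future Java 24 variant would
mirror the Java 23 interface and handle added below:

    // Hypothetical Java 24 variant, following the Java 23 scaffolding.
    public interface Java24EntitlementChecker extends Java23EntitlementChecker {
        // checks for methods introduced in Java 24 would be declared here
    }

    public class Java24EntitlementCheckerHandle {
        public static Java24EntitlementChecker instance() {
            return Holder.instance;
        }

        private static class Holder {
            // HandleLoader reflectively fetches the checker instance from
            // EntitlementInitialization and casts it to the requested type.
            private static final Java24EntitlementChecker instance =
                HandleLoader.load(Java24EntitlementChecker.class);
        }

        private Java24EntitlementCheckerHandle() {}
    }

InstrumenterImpl would then select the "Java24" class name prefix when
Runtime.version().feature() >= 24, just as this change does for 23.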
--- .../gradle/internal/MrjarPlugin.java | 10 +++++ .../impl/InstrumenterImpl.java | 24 +++++++---- libs/entitlement/bridge/build.gradle | 17 ++++---- .../bridge/EntitlementCheckerHandle.java | 25 +----------- .../entitlement/bridge/HandleLoader.java | 40 +++++++++++++++++++ .../bridge/Java23EntitlementChecker.java | 12 ++++++ .../Java23EntitlementCheckerHandle.java | 27 +++++++++++++ libs/entitlement/build.gradle | 12 +++++- .../EntitlementInitialization.java | 34 +++++++++++++++- ...Java23ElasticsearchEntitlementChecker.java | 26 ++++++++++++ 10 files changed, 184 insertions(+), 43 deletions(-) create mode 100644 libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java create mode 100644 libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java create mode 100644 libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java create mode 100644 libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index d1585120b0803..7c488e6e73fee 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -21,6 +21,7 @@ import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.compile.CompileOptions; import org.gradle.api.tasks.compile.JavaCompile; import org.gradle.api.tasks.javadoc.Javadoc; @@ -87,6 +88,7 @@ public void apply(Project project) { String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); configureSourceSetInJar(project, mainSourceSet, javaVersion); + addJar(project, mainSourceSet, javaVersion); mainSourceSets.add(mainSourceSetName); testSourceSets.add(mainSourceSetName); @@ -147,6 +149,14 @@ private SourceSet addSourceSet( return sourceSet; } + private void addJar(Project project, SourceSet sourceSet, int javaVersion) { + project.getConfigurations().register("java" + javaVersion); + TaskProvider jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> { + task.from(sourceSet.getOutput()); + }); + project.getArtifacts().add("java" + javaVersion, jarTask); + } + private void configurePreviewFeatures(Project project, SourceSet sourceSet, int javaVersion) { project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> { CompileOptions compileOptions = compileTask.getOptions(); diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 53e76372b107d..dc20b16400f3d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -36,6 +36,22 @@ import 
static org.objectweb.asm.Opcodes.INVOKEVIRTUAL; public class InstrumenterImpl implements Instrumenter { + + private static final String checkerClassDescriptor; + private static final String handleClass; + static { + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + String checkerClass = "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker"; + handleClass = checkerClass + "Handle"; + checkerClassDescriptor = Type.getObjectType(checkerClass).getDescriptor(); + } + /** * To avoid class name collisions during testing without an agent to replace classes in-place. */ @@ -269,13 +285,7 @@ private void invokeInstrumentationMethod() { } protected void pushEntitlementChecker(MethodVisitor mv) { - mv.visitMethodInsn( - INVOKESTATIC, - "org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle", - "instance", - "()Lorg/elasticsearch/entitlement/bridge/EntitlementChecker;", - false - ); + mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", "()" + checkerClassDescriptor, false); } public record ClassFileInfo(String fileName, byte[] bytecodes) {} diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index 3d59dd3eaf33e..a9f8f6e3a3b0a 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -7,19 +7,18 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.mrjar' -configurations { - bridgeJar { - canBeConsumed = true - canBeResolved = false +tasks.named('jar').configure { + // guarding for intellij + if (sourceSets.findByName("main23")) { + from sourceSets.main23.output } } -artifacts { - bridgeJar(jar) -} - -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java index 2fe4a163a4136..26c9c83b8eb51 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java @@ -9,9 +9,6 @@ package org.elasticsearch.entitlement.bridge; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; - /** * Makes the {@link EntitlementChecker} available to injected bytecode. */ @@ -35,27 +32,7 @@ private static class Holder { * The {@code EntitlementInitialization} class is what actually instantiates it and makes it available; * here, we copy it into a static final variable for maximum performance. 
*/ - private static final EntitlementChecker instance; - static { - String initClazz = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; - final Class clazz; - try { - clazz = ClassLoader.getSystemClassLoader().loadClass(initClazz); - } catch (ClassNotFoundException e) { - throw new AssertionError("java.base cannot find entitlement initialziation", e); - } - final Method checkerMethod; - try { - checkerMethod = clazz.getMethod("checker"); - } catch (NoSuchMethodException e) { - throw new AssertionError("EntitlementInitialization is missing checker() method", e); - } - try { - instance = (EntitlementChecker) checkerMethod.invoke(null); - } catch (IllegalAccessException | InvocationTargetException e) { - throw new AssertionError(e); - } - } + private static final EntitlementChecker instance = HandleLoader.load(EntitlementChecker.class); } // no construction diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java new file mode 100644 index 0000000000000..bbfec47884f79 --- /dev/null +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +class HandleLoader { + + static T load(Class checkerClass) { + String initClassName = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; + final Class initClazz; + try { + initClazz = ClassLoader.getSystemClassLoader().loadClass(initClassName); + } catch (ClassNotFoundException e) { + throw new AssertionError("java.base cannot find entitlement initialization", e); + } + final Method checkerMethod; + try { + checkerMethod = initClazz.getMethod("checker"); + } catch (NoSuchMethodException e) { + throw new AssertionError("EntitlementInitialization is missing checker() method", e); + } + try { + return checkerClass.cast(checkerMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + + // no instance + private HandleLoader() {} +} diff --git a/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java new file mode 100644 index 0000000000000..244632e80ffa0 --- /dev/null +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +public interface Java23EntitlementChecker extends EntitlementChecker {} diff --git a/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java new file mode 100644 index 0000000000000..f41c5dcdf14fd --- /dev/null +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +/** + * Java23 variant of {@link EntitlementChecker} handle holder. + */ +public class Java23EntitlementCheckerHandle { + + public static Java23EntitlementChecker instance() { + return Holder.instance; + } + + private static class Holder { + private static final Java23EntitlementChecker instance = HandleLoader.load(Java23EntitlementChecker.class); + } + + // no construction + private Java23EntitlementCheckerHandle() {} +} diff --git a/libs/entitlement/build.gradle b/libs/entitlement/build.gradle index 12e0bb48a54b7..841591873153c 100644 --- a/libs/entitlement/build.gradle +++ b/libs/entitlement/build.gradle @@ -6,10 +6,13 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ + +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' - apply plugin: 'elasticsearch.embedded-providers' +apply plugin: 'elasticsearch.mrjar' embeddedProviders { impl 'entitlement', project(':libs:entitlement:asm-provider') @@ -23,8 +26,13 @@ dependencies { testImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'entitlement' } + + // guarding for intellij + if (sourceSets.findByName("main23")) { + main23CompileOnly project(path: ':libs:entitlement:bridge', configuration: 'java23') + } } -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 6d31abe4cf054..ca57e7b255bca 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -27,6 +27,8 @@ import java.lang.instrument.Instrumentation; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @@ -59,7 +61,7 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(createPolicyManager()); + manager = initChecker(); Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( "org.elasticsearch.entitlement.bridge.EntitlementChecker" @@ -137,6 +139,36 @@ private static Set getModuleNames(Path pluginRoot, boolean isModular) { return Set.of(ALL_UNNAMED); } + private static ElasticsearchEntitlementChecker initChecker() throws IOException { + final PolicyManager policyManager = createPolicyManager(); + + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + final String className = "org.elasticsearch.entitlement.runtime.api." 
+ classNamePrefix + "ElasticsearchEntitlementChecker"; + Class clazz; + try { + clazz = Class.forName(className); + } catch (ClassNotFoundException e) { + throw new AssertionError("entitlement lib cannot find entitlement impl", e); + } + Constructor constructor; + try { + constructor = clazz.getConstructor(PolicyManager.class); + } catch (NoSuchMethodException e) { + throw new AssertionError("entitlement impl is missing no arg constructor", e); + } + try { + return (ElasticsearchEntitlementChecker) constructor.newInstance(policyManager); + } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) { + throw new AssertionError(e); + } + } + private static String internalName(Class c) { return c.getName().replace('.', '/'); } diff --git a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java new file mode 100644 index 0000000000000..d0f9f4f48609c --- /dev/null +++ b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.api; + +import org.elasticsearch.entitlement.bridge.Java23EntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; + +public class Java23ElasticsearchEntitlementChecker extends ElasticsearchEntitlementChecker implements Java23EntitlementChecker { + + public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) { + super(policyManager); + } + + @Override + public void check$java_lang_System$exit(Class callerClass, int status) { + // TODO: this is just an example, we shouldn't really override a method implemented in the superclass + super.check$java_lang_System$exit(callerClass, status); + } +} From 080359873352660f71265bcfddd532d079cf6ff0 Mon Sep 17 00:00:00 2001 From: Ankita Kumar Date: Fri, 22 Nov 2024 10:52:15 -0500 Subject: [PATCH 179/386] Update test (#117202) --- .../elasticsearch/index/reindex/ReindexNodeShutdownIT.java | 7 ++++--- muted-tests.yml | 3 --- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java index 4a001bb2d0969..a4b030e3c793f 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java @@ -35,7 +35,7 @@ * The test works as follows: * 1. Start a large (reasonably long running) reindexing request on the coordinator-only node. * 2. Check that the reindexing task appears on the coordinating node - * 3. With a 10s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, + * 3. 
With a 60s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, * wait for the reindexing task to complete before closing the node * 4. Confirm that the reindexing task succeeds with the wait (it will fail without it) */ @@ -58,8 +58,9 @@ public void testReindexWithShutdown() throws Exception { final String masterNodeName = internalCluster().startMasterOnlyNode(); final String dataNodeName = internalCluster().startDataOnlyNode(); + /* Maximum time to wait for reindexing tasks to complete before shutdown */ final Settings COORD_SETTINGS = Settings.builder() - .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(10)) + .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(60)) .build(); final String coordNodeName = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); @@ -118,7 +119,7 @@ public void onFailure(Exception e) { internalCluster().stopNode(coordNodeName); } - // Make sure all documents from the source index have been reindexed into the destination index + // Make sure all documents from the source index have been re-indexed into the destination index private void checkDestinationIndex(String dataNodeName, int numDocs) throws Exception { assertTrue(indexExists(DEST_INDEX)); flushAndRefresh(DEST_INDEX); diff --git a/muted-tests.yml b/muted-tests.yml index fcc3b6b6e9b12..f5cf062d519ea 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -127,9 +127,6 @@ tests: - class: org.elasticsearch.search.SearchServiceTests method: testParseSourceValidation issue: https://github.com/elastic/elasticsearch/issues/115936 -- class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT - method: testReindexWithShutdown - issue: https://github.com/elastic/elasticsearch/issues/115996 - class: org.elasticsearch.search.query.SearchQueryIT method: testAllDocsQueryString issue: https://github.com/elastic/elasticsearch/issues/115728 From 94c3e8226bcf559ca93752bd9cb4a0eb6cc08361 Mon Sep 17 00:00:00 2001 From: Ying Mao Date: Fri, 22 Nov 2024 11:10:34 -0500 Subject: [PATCH 180/386] Fixing bug setting index when parsing Google Vertex AI results (#117287) * Using record ID as index value when parsing Google Vertex AI rerank results * Update docs/changelog/117287.yaml * PR feedback --- docs/changelog/117287.yaml | 5 +++ .../GoogleVertexAiRerankResponseEntity.java | 28 ++++++++++++-- ...ogleVertexAiRerankResponseEntityTests.java | 37 ++++++++++++++++++- 3 files changed, 65 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/117287.yaml diff --git a/docs/changelog/117287.yaml b/docs/changelog/117287.yaml new file mode 100644 index 0000000000000..08da9dd8087b2 --- /dev/null +++ b/docs/changelog/117287.yaml @@ -0,0 +1,5 @@ +pr: 117287 +summary: Fixing bug setting index when parsing Google Vertex AI results +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java index 24946ee5875a5..78673277797d2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java @@ -30,6 +30,8 @@ public class 
GoogleVertexAiRerankResponseEntity { private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Google Vertex AI rerank response"; + private static final String INVALID_ID_FIELD_FORMAT_TEMPLATE = "Expected numeric value for record ID field in Google Vertex AI rerank " + + "response but received [%s]"; /** * Parses the Google Vertex AI rerank response. @@ -109,14 +111,27 @@ private static List doParse(XContentParser parser) throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.SCORE.getPreferredName())); } - return new RankedDocsResults.RankedDoc(index, parsedRankedDoc.score, parsedRankedDoc.content); + if (parsedRankedDoc.id == null) { + throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.ID.getPreferredName())); + } + + try { + return new RankedDocsResults.RankedDoc( + Integer.parseInt(parsedRankedDoc.id), + parsedRankedDoc.score, + parsedRankedDoc.content + ); + } catch (NumberFormatException e) { + throw new IllegalStateException(format(INVALID_ID_FIELD_FORMAT_TEMPLATE, parsedRankedDoc.id)); + } }); } - private record RankedDoc(@Nullable Float score, @Nullable String content) { + private record RankedDoc(@Nullable Float score, @Nullable String content, @Nullable String id) { private static final ParseField CONTENT = new ParseField("content"); private static final ParseField SCORE = new ParseField("score"); + private static final ParseField ID = new ParseField("id"); private static final ObjectParser PARSER = new ObjectParser<>( "google_vertex_ai_rerank_response", true, @@ -126,6 +141,7 @@ private record RankedDoc(@Nullable Float score, @Nullable String content) { static { PARSER.declareString(Builder::setContent, CONTENT); PARSER.declareFloat(Builder::setScore, SCORE); + PARSER.declareString(Builder::setId, ID); } public static RankedDoc parse(XContentParser parser) { @@ -137,6 +153,7 @@ private static final class Builder { private String content; private Float score; + private String id; private Builder() {} @@ -150,8 +167,13 @@ public Builder setContent(String content) { return this; } + public Builder setId(String id) { + this.id = id; + return this; + } + public RankedDoc build() { - return new RankedDoc(score, content); + return new RankedDoc(score, content, id); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java index 32450e3facfd0..7ff79e2618425 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java @@ -39,7 +39,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2")))); + assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2")))); } public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { @@ -68,7 +68,7 @@ public void 
testFromResponse_CreatesResultsForMultipleItems() throws IOException assertThat( parsedResults.getRankedDocs(), - is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1"))) + is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1"))) ); } @@ -161,4 +161,37 @@ public void testFromResponse_FailsWhenScoreFieldIsNotPresent() { assertThat(thrownException.getMessage(), is("Failed to find required field [score] in Google Vertex AI rerank response")); } + + public void testFromResponse_FailsWhenIDFieldIsNotInteger() { + String responseJson = """ + { + "records": [ + { + "id": "abcd", + "title": "title 2", + "content": "content 2", + "score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "content": "content 1", + "score": 0.96 + } + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Expected numeric value for record ID field in Google Vertex AI rerank response but received [abcd]") + ); + } } From 893dfd3c9aa3ac8c11d56e063f30ca7acea733bd Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 22 Nov 2024 11:28:06 -0500 Subject: [PATCH 181/386] ESQL: Make WEIGHTED_AVG not preview (#117356) It's not PREVIEW. --- docs/reference/esql/functions/aggregation-functions.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 7cdc42ea6cbf9..3a27e1944a684 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -20,7 +20,7 @@ The <> command supports these aggregate functions: * <> * <> * <> -* experimental:[] <> +* <> // end::agg_list[] include::layout/avg.asciidoc[] From 9b4c89d50763984ce1059d6cc1412336c7b7c3de Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 22 Nov 2024 09:04:35 -0800 Subject: [PATCH 182/386] Don't run the DRA staging build on the 8.x branch (#117355) --- .buildkite/scripts/dra-workflow.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index 81b8225e443a4..f2dc40ca1927f 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}" BRANCH="${BUILDKITE_BRANCH:-}" # Don't publish main branch to staging -if [[ "$BRANCH" == "main" && "$WORKFLOW" == "staging" ]]; then +if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then exit 0 fi From f325c1541088995f35e7d39cf181a9b970d3c90a Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Fri, 22 Nov 2024 11:00:26 -0700 Subject: [PATCH 183/386] FIx async search tests - do not warn on the presence of .async-search (#117301) --- muted-tests.yml | 3 --- .../test/rest/ESRestTestCase.java | 25 +++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f5cf062d519ea..c4af8bf1c0d21 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -214,9 +214,6 @@ tests: - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: 
https://github.com/elastic/elasticsearch/issues/116990 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/esql/esql-across-clusters/line_197} - issue: https://github.com/elastic/elasticsearch/issues/117099 - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT method: test {yaml=/10_apm/Test template reinstallation} issue: https://github.com/elastic/elasticsearch/issues/116445 diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index dd08107bd67fb..a4195a07e7621 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1131,6 +1131,7 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE } final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns)); deleteRequest.addParameter("expand_wildcards", "open,closed,hidden"); + deleteRequest.setOptions(deleteRequest.getOptions().toBuilder().setWarningsHandler(ignoreAsyncSearchWarning()).build()); final Response response = adminClient().performRequest(deleteRequest); try (InputStream is = response.getEntity().getContent()) { assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged")); @@ -1143,6 +1144,30 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE } } + // Make warnings handler that ignores the .async-search warning since .async-search may randomly appear when async requests are slow + // See: https://github.com/elastic/elasticsearch/issues/117099 + protected static WarningsHandler ignoreAsyncSearchWarning() { + return new WarningsHandler() { + @Override + public boolean warningsShouldFailRequest(List warnings) { + if (warnings.isEmpty()) { + return false; + } + return warnings.equals( + List.of( + "this request accesses system indices: [.async-search], " + + "but in a future major version, direct access to system indices will be prevented by default" + ) + ) == false; + } + + @Override + public String toString() { + return "ignore .async-search warning"; + } + }; + } + protected static void wipeDataStreams() throws IOException { try { if (hasXPack()) { From dae59da5f969a57fc18172c605914591902381a2 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 22 Nov 2024 10:15:54 -0800 Subject: [PATCH 184/386] Fix constand_keyword test run and properly test recent behavior change (#117284) --- .../index/mapper/MapperFeatures.java | 7 ++++- .../mapper-constant-keyword/build.gradle | 2 +- .../ConstantKeywordClientYamlTestSuiteIT.java | 10 +++++++ .../test/20_synthetic_source.yml | 26 +++++++++++++++++-- 4 files changed, 41 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 5743baeec536d..333c37381c587 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -57,6 +57,10 @@ public Set getFeatures() { ); } + public static final NodeFeature CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX = new NodeFeature( + "mapper.constant_keyword.synthetic_source_write_fix" + ); + @Override public Set getTestFeatures() { return Set.of( @@ -66,7 +70,8 @@ public Set 
getTestFeatures() { SourceFieldMapper.SOURCE_MODE_FROM_INDEX_SETTING, IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS, - MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT + MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT, + CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX ); } } diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index 4f50246450f3f..c1e0eb61b611b 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -6,7 +6,7 @@ */ apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { name 'constant-keyword' diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java index 789059d9e11c0..5b6048b481abf 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class ConstantKeywordClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public ConstantKeywordClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidat public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("constant-keyword").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml index d40f69f483dbb..012b1006b8d20 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml @@ -1,7 +1,7 @@ constant_keyword: - requires: - cluster_features: [ "mapper.source.mode_from_index_setting" ] - reason: "Source mode configured through index setting" + cluster_features: [ "mapper.constant_keyword.synthetic_source_write_fix" ] + reason: "Behavior fix" - do: indices.create: @@ -26,6 +26,15 @@ constant_keyword: body: kwd: foo + - do: + index: + index: test + id: 2 + refresh: true + body: + kwd: foo + const_kwd: bar + - do: search: index: test @@ -33,6 +42,19 @@ constant_keyword: query: ids: values: [1] + + - match: + hits.hits.0._source: + kwd: foo + + - do: + search: + index: test + body: + query: 
+ ids: + values: [2] + - match: hits.hits.0._source: kwd: foo From f3eb27e234568699020c720f53fb33084030dbc9 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Fri, 22 Nov 2024 20:48:51 +0200 Subject: [PATCH 185/386] ESQL: use field_caps native nested fields filtering (#117201) * Just filter the nested fields natively with field_caps support --------- Co-authored-by: Elastic Machine Co-authored-by: Craig Taverner --- docs/changelog/117201.yaml | 6 + .../esql/qa/rest/FieldExtractorTestCase.java | 318 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/session/IndexResolver.java | 25 +- 4 files changed, 334 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/117201.yaml diff --git a/docs/changelog/117201.yaml b/docs/changelog/117201.yaml new file mode 100644 index 0000000000000..f8a2be35c70a3 --- /dev/null +++ b/docs/changelog/117201.yaml @@ -0,0 +1,6 @@ +pr: 117201 +summary: "Use `field_caps` native nested fields filtering" +area: ES|QL +type: bug +issues: + - 117054 diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 6f45c9d92fd12..813354db697e1 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -28,6 +28,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.hamcrest.Matcher; import org.junit.Before; @@ -1107,6 +1108,323 @@ public void testTypeConflictInObject() throws IOException { ); } + /** + * Test for https://github.com/elastic/elasticsearch/issues/117054 fix + */ + public void testOneNestedSubField_AndSameNameSupportedField() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + + Map result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", Collections.EMPTY_LIST) + ); + + index("test", """ + {"Responses.process.pid": 123,"process.parent.command_line":"run.bat"}"""); + + result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", 
"text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:18: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: Unknown column [Responses.process.pid]")); + } + + public void testOneNestedSubField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 123}"""); + index("test2", """ + {"process.parent.command_line":"run.bat"}"""); + + Map result = runEsql("FROM test* | SORT process.parent.command_line ASC NULLS FIRST"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test* | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:19: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test* + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: Unknown column [Responses.process.pid]")); + } + + public void testOneNestedField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "integer", + "fields": { + "pid": { + 
"type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 111,"process.parent.command_line":"run1.bat"}"""); + index("test2", """ + {"Responses.process": 222,"process.parent.command_line":"run2.bat"}"""); + + Map result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* | where Responses.process.pid == 111 + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of()) + ); + + result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* + | SORT process.parent.command_line + | WHERE Responses.process IS NULL + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat"))) + ); + } + + private void assumeIndexResolverNestedFieldsNameClashFixed() throws IOException { + // especially for BWC tests but also for regular tests + var capsName = EsqlCapabilities.Cap.FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER.name().toLowerCase(Locale.ROOT); + boolean requiredClusterCapability = clusterHasCapability("POST", "/_query", List.of(), List.of(capsName)).orElse(false); + assumeTrue( + "This test makes sense for versions that have the fix for https://github.com/elastic/elasticsearch/issues/117054", + requiredClusterCapability + ); + } + private CheckedConsumer empNoInObject(String empNoType) { return index -> { index.startObject("properties"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c33acf95aa33f..54b02c87b285b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -511,7 +511,12 @@ public enum Cap {
     /**
      * LOOKUP JOIN
      */
-    JOIN_LOOKUP(Build.current().isSnapshot());
+    JOIN_LOOKUP(Build.current().isSnapshot()),
+
+    /**
+     * Fix for https://github.com/elastic/elasticsearch/issues/117054
+     */
+    FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER;
 
     private final boolean enabled;
 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java
index 0be8cf820d345..f61be4b59830e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java
@@ -98,9 +98,8 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp
         // TODO flattened is simpler - could we get away with that?
         String[] names = fieldsCaps.keySet().toArray(new String[0]);
         Arrays.sort(names);
-        Set forbiddenFields = new HashSet<>();
         Map rootFields = new HashMap<>();
-        name: for (String name : names) {
+        for (String name : names) {
             Map fields = rootFields;
             String fullName = name;
             boolean isAlias = false;
@@ -111,9 +110,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp
                 break;
             }
             String parent = name.substring(0, nextDot);
-            if (forbiddenFields.contains(parent)) {
-                continue name;
-            }
             EsField obj = fields.get(parent);
             if (obj == null) {
                 obj = new EsField(parent, OBJECT, new HashMap<>(), false, true);
@@ -125,16 +121,10 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp
             fields = obj.getProperties();
             name = name.substring(nextDot + 1);
         }
-
-        List caps = fieldsCaps.get(fullName);
-        if (allNested(caps)) {
-            forbiddenFields.add(name);
-            continue;
-        }
         // TODO we're careful to make isAlias match IndexResolver - but do we use it?
         EsField field = firstUnsupportedParent == null
-            ? createField(fieldCapsResponse, name, fullName, caps, isAlias)
+            ? createField(fieldCapsResponse, name, fullName, fieldsCaps.get(fullName), isAlias)
             : new UnsupportedEsField(
                 fullName,
                 firstUnsupportedParent.getOriginalType(),
@@ -164,15 +154,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp
         return IndexResolution.valid(new EsIndex(indexPattern, rootFields, concreteIndices), concreteIndices.keySet(), unavailableRemotes);
     }
 
-    private boolean allNested(List caps) {
-        for (IndexFieldCapabilities cap : caps) {
-            if (false == cap.type().equalsIgnoreCase("nested")) {
-                return false;
-            }
-        }
-        return true;
-    }
-
     private static Map<String, List<IndexFieldCapabilities>> collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) {
         Set seenHashes = new HashSet<>();
         Map<String, List<IndexFieldCapabilities>> fieldsCaps = new HashMap<>();
@@ -278,6 +259,8 @@ private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set
         // lenient because we throw our own errors looking at the response e.g. 
if something was not resolved
         // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
         req.indicesOptions(FIELD_CAPS_INDICES_OPTIONS);
+        // we ignore the nested data type fields starting with https://github.com/elastic/elasticsearch/pull/111495
+        req.filters("-nested");
         req.setMergeResults(false);
         return req;
     }

From 34d96526f95c9924a48722db8be0f7be31c1bfc4 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 22 Nov 2024 14:21:42 -0500
Subject: [PATCH 186/386] ESQL: Fix limit task test (#117270)

Fix a test for the task results when running the `LIMIT` operation. We
were releasing a few permits to get the query started. And when you
combine that with the page worth of permits that the test was releasing
we'd sometimes finish the entire limited query, stopping the task too
early to find a running task.

Closes #107293
---
 .../xpack/esql/action/EsqlActionTaskIT.java  | 20 ++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index cde4f10ef556c..e6db79c7d8abd 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -79,6 +79,11 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
     private String REDUCE_DESCRIPTION;
     private boolean nodeLevelReduction;
 
+    /**
+     * Number of docs released by {@link #startEsql}.
+     */
+    private int prereleasedDocs;
+
     @Before
     public void setup() {
         assumeTrue("requires query pragmas", canUseQueryPragmas());
@@ -104,6 +109,7 @@ public void testTaskContents() throws Exception {
         ActionFuture response = startEsql();
         try {
             getTasksStarting();
+            logger.info("unblocking script");
             scriptPermits.release(pageSize());
             List foundTasks = getTasksRunning();
             int luceneSources = 0;
@@ -216,9 +222,15 @@ private ActionFuture startEsql() {
         return startEsql("from test | stats sum(pause_me)");
     }
 
+    /**
+     * Start an ESQL query, releasing a few docs from the {@code pause_me}
+     * script so it'll actually start but won't finish its first page.
+     */
     private ActionFuture startEsql(String query) {
         scriptPermits.drainPermits();
-        scriptPermits.release(between(1, 5));
+        // Allow a few docs to calculate so the query gets "started"
+        prereleasedDocs = between(1, pageSize() / 2);
+        scriptPermits.release(prereleasedDocs);
         var settingsBuilder = Settings.builder()
             // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. 
.put("data_partitioning", "shard") @@ -444,6 +456,7 @@ public void testTaskContentsForTopNQuery() throws Exception { ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally { @@ -455,7 +468,6 @@ public void testTaskContentsForTopNQuery() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107293") public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); READ_DESCRIPTION = """ @@ -475,7 +487,8 @@ public void testTaskContentsForLimitQuery() throws Exception { ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); - scriptPermits.release(pageSize()); + logger.info("unblocking script"); + scriptPermits.release(pageSize() - prereleasedDocs); getTasksRunning(); } finally { scriptPermits.release(numberOfDocs()); @@ -504,6 +517,7 @@ public void testTaskContentsForGroupingStatsQuery() throws Exception { ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally { From 4ecc7518ef9b9cd063240ffabb543f84abb6b5f0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 22 Nov 2024 14:41:18 -0500 Subject: [PATCH 187/386] ESQL: Add docs for MV_PERCENTILE (#117377) We built this a while back. Let's document it. --- docs/reference/esql/functions/mv-functions.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/esql/functions/mv-functions.asciidoc b/docs/reference/esql/functions/mv-functions.asciidoc index 4093e44c16911..3da0249c9c0db 100644 --- a/docs/reference/esql/functions/mv-functions.asciidoc +++ b/docs/reference/esql/functions/mv-functions.asciidoc @@ -19,6 +19,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -37,6 +38,7 @@ include::layout/mv_max.asciidoc[] include::layout/mv_median.asciidoc[] include::layout/mv_median_absolute_deviation.asciidoc[] include::layout/mv_min.asciidoc[] +include::layout/mv_percentile.asciidoc[] include::layout/mv_pseries_weighted_sum.asciidoc[] include::layout/mv_slice.asciidoc[] include::layout/mv_sort.asciidoc[] From e90eb7ab0df06239a69a1945ca6ef5effc065433 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 22 Nov 2024 14:58:48 -0500 Subject: [PATCH 188/386] Improve halfbyte transposition performance, marginally improving bbq performance (#117350) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The transposition of the bits in half-byte queries for BBQ is pretty convoluted and slow. This commit greatly simplifies & improves performance for this small part of bbq queries and indexing. Here are the results of a small JMH benchmark for this particular function. ``` TransposeBinBenchmark.transposeBinNew 1024 thrpt 5 857.779 ± 44.031 ops/ms TransposeBinBenchmark.transposeBinOrig 1024 thrpt 5 94.950 ± 2.898 ops/ms ``` While this is a huge improvement for this small function, the impact at query and index time is only marginal. But, the code simplification itself is enough to warrant this change in my opinion. 
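
For intuition, here is a small self-contained sketch of the bit-plane layout
the new transposeHalfByte produces. This is not the committed code: it keeps
the four planes in a byte[4][] rather than one packed array, it assumes the
dimension count is a multiple of 8, and the dotViaPlanes helper exists only
for this illustration.

```
// Split 4-bit query values (0..15) into four bit-planes, MSB-first within
// each byte. Plane p holds bit p of every dimension.
static byte[][] toBitPlanes(byte[] q) {
    byte[][] planes = new byte[4][q.length / 8];
    for (int i = 0; i < q.length; i++) {
        for (int p = 0; p < 4; p++) {
            if (((q[i] >> p) & 1) == 1) {
                planes[p][i / 8] |= (byte) (1 << (7 - (i % 8)));
            }
        }
    }
    return planes;
}

// Recover the int4-by-binary dot product: AND + popcount each plane against
// the stored bit vector, then shift each plane's overlap by its plane index.
static long dotViaPlanes(byte[][] planes, byte[] indexBits) {
    long sum = 0;
    for (int p = 0; p < 4; p++) {
        long overlap = 0;
        for (int j = 0; j < indexBits.length; j++) {
            overlap += Integer.bitCount(planes[p][j] & indexBits[j] & 0xFF);
        }
        sum += overlap << p; // plane p contributes with weight 2^p
    }
    return sum;
}
```

This is why the query only needs to be transposed once: every subsequent
comparison against an index vector is plain AND + popcount per plane.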
--- docs/changelog/117350.yaml | 5 ++ .../index/codec/vectors/BQSpaceUtils.java | 68 +++++++------------ .../index/codec/vectors/BinaryQuantizer.java | 8 +-- 3 files changed, 32 insertions(+), 49 deletions(-) create mode 100644 docs/changelog/117350.yaml diff --git a/docs/changelog/117350.yaml b/docs/changelog/117350.yaml new file mode 100644 index 0000000000000..dca54f2037a87 --- /dev/null +++ b/docs/changelog/117350.yaml @@ -0,0 +1,5 @@ +pr: 117350 +summary: "Improve halfbyte transposition performance, marginally improving bbq performance" +area: Vector Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java index 68363b5926a6b..f9fad74835683 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java @@ -23,56 +23,38 @@ public class BQSpaceUtils { public static final short B_QUERY = 4; - // the first four bits masked - private static final int B_QUERY_MASK = 15; /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + * Transpose the query vector into a byte array allowing for efficient bitwise operations with the + * index bit vectors. The idea here is to organize the query vector bits such that the first bit + * of every dimension is in the first set dimensions bits, or (dimensions/8) bytes. The second, + * third, and fourth bits are in the second, third, and fourth set of dimensions bits, + * respectively. This allows for direct bitwise comparisons with the stored index vectors through + * summing the bitwise results with the relative required bit shifts. 
+ * * @param q the query vector, assumed to be half-byte quantized with values between 0 and 15 - * @param dimensions the number of dimensions in the query vector * @param quantQueryByte the byte array to store the transposed query vector */ - public static void transposeBin(byte[] q, int dimensions, byte[] quantQueryByte) { - // TODO: rewrite this in Panama Vector API - int qOffset = 0; - final byte[] v1 = new byte[4]; - final byte[] v = new byte[32]; - for (int i = 0; i < dimensions; i += 32) { - // for every four bytes we shift left (with remainder across those bytes) - for (int j = 0; j < v.length; j += 4) { - v[j] = (byte) (q[qOffset + j] << B_QUERY | ((q[qOffset + j] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 1] = (byte) (q[qOffset + j + 1] << B_QUERY | ((q[qOffset + j + 1] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 2] = (byte) (q[qOffset + j + 2] << B_QUERY | ((q[qOffset + j + 2] >>> B_QUERY) & B_QUERY_MASK)); - v[j + 3] = (byte) (q[qOffset + j + 3] << B_QUERY | ((q[qOffset + j + 3] >>> B_QUERY) & B_QUERY_MASK)); - } - for (int j = 0; j < B_QUERY; j++) { - moveMaskEpi8Byte(v, v1); - for (int k = 0; k < 4; k++) { - quantQueryByte[(B_QUERY - j - 1) * (dimensions / 8) + i / 8 + k] = v1[k]; - v1[k] = 0; - } - for (int k = 0; k < v.length; k += 4) { - v[k] = (byte) (v[k] + v[k]); - v[k + 1] = (byte) (v[k + 1] + v[k + 1]); - v[k + 2] = (byte) (v[k + 2] + v[k + 2]); - v[k + 3] = (byte) (v[k + 3] + v[k + 3]); - } - } - qOffset += 32; - } - } - - private static void moveMaskEpi8Byte(byte[] v, byte[] v1b) { - int m = 0; - for (int k = 0; k < v.length; k++) { - if ((v[k] & 0b10000000) == 0b10000000) { - v1b[m] |= 0b00000001; - } - if (k % 8 == 7) { - m++; - } else { - v1b[m] <<= 1; + public static void transposeHalfByte(byte[] q, byte[] quantQueryByte) { + for (int i = 0; i < q.length;) { + assert q[i] >= 0 && q[i] <= 15; + int lowerByte = 0; + int lowerMiddleByte = 0; + int upperMiddleByte = 0; + int upperByte = 0; + for (int j = 7; j >= 0 && i < q.length; j--) { + lowerByte |= (q[i] & 1) << j; + lowerMiddleByte |= ((q[i] >> 1) & 1) << j; + upperMiddleByte |= ((q[i] >> 2) & 1) << j; + upperByte |= ((q[i] >> 3) & 1) << j; + i++; } + int index = ((i + 7) / 8) - 1; + quantQueryByte[index] = (byte) lowerByte; + quantQueryByte[index + quantQueryByte.length / 4] = (byte) lowerMiddleByte; + quantQueryByte[index + quantQueryByte.length / 2] = (byte) upperMiddleByte; + quantQueryByte[index + 3 * quantQueryByte.length / 4] = (byte) upperByte; } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java index 192fb9092ac3a..aa72904fe1341 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java @@ -223,9 +223,7 @@ public QueryAndIndexResults quantizeQueryAndIndex(float[] vector, byte[] indexDe // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 // q¯ is an approximation of q′ (scalar quantized approximation) - // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal - byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); - BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, queryDestination); + BQSpaceUtils.transposeHalfByte(byteQuery, queryDestination); QueryFactors factors = new QueryFactors(quantResult.quantizedSum, distToC, lower, width, normVmC, vDotC); final float[] indexCorrections; if (similarityFunction == EUCLIDEAN) { @@ -366,9 +364,7 @@ 
public QueryFactors quantizeForQuery(float[] vector, byte[] destination, float[] // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 // q¯ is an approximation of q′ (scalar quantized approximation) - // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal - byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); - BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, destination); + BQSpaceUtils.transposeHalfByte(byteQuery, destination); QueryFactors factors; if (similarityFunction != EUCLIDEAN) { From 8f943a6a7cc948c7ae0c8248c8f53383b01e537d Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 22 Nov 2024 21:11:28 +0100 Subject: [PATCH 189/386] [Build] Update Gradle wrapper to 8.11.1 (#115886) replace deprecated gradle api fix permission api usage in debian and rpm package creation remove deprecated usage of #ProjectDependency..getDependencyProject() improves gradle configuration cache reading in our weekly benchmark by almost 30% --- .../gradle/wrapper/gradle-wrapper.properties | 4 +- .../internal/ElasticsearchJavadocPlugin.java | 8 +++- .../test/TestWithDependenciesPlugin.java | 35 ++++++++++----- .../test/rest/RestTestBasePlugin.java | 8 +++- .../AbstractCustomJavaToolchainResolver.java | 1 + .../src/main/resources/minimumGradleVersion | 2 +- .../gradle/LazyFileOutputStream.java | 6 +++ .../gradle/plugin/BasePluginBuildPlugin.java | 22 ++++++++- distribution/packages/build.gradle | 16 +++---- gradle/verification-metadata.xml | 45 +++++++++++++++++++ gradle/wrapper/gradle-wrapper.properties | 4 +- .../gradle/wrapper/gradle-wrapper.properties | 4 +- 12 files changed, 125 insertions(+), 30 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java index 42a44edd7f9a5..dbd8181624f65 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java @@ -18,6 +18,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.plugins.BasePluginExtension; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.tasks.javadoc.Javadoc; @@ -82,12 +83,15 @@ private void configureJavadocForConfiguration(Project project, boolean shadow, C .sorted(Comparator.comparing(Dependency::getGroup)) .filter(d -> d instanceof ProjectDependency) .map(d -> (ProjectDependency) 
d) - .filter(p -> p.getDependencyProject() != null) .forEach(projectDependency -> configureDependency(project, shadow, projectDependency)); } private void configureDependency(Project project, boolean shadowed, ProjectDependency dep) { - var upstreamProject = dep.getDependencyProject(); + // we should use variant aware dependency management to resolve artifacts required for javadoc here + Project upstreamProject = project.project(((ProjectDependencyInternal) dep).getIdentityPath().getPath()); + if (upstreamProject == null) { + return; + } if (shadowed) { /* * Include the source of shadowed upstream projects so we don't diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java index 487fe012a5941..e24b1afa7747b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java @@ -10,11 +10,15 @@ package org.elasticsearch.gradle.internal.test; import org.apache.commons.lang.StringUtils; -import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; +import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSetContainer; @@ -45,23 +49,34 @@ public void apply(final Project project) { Configuration testImplementationConfig = project.getConfigurations().getByName("testImplementation"); testImplementationConfig.getDependencies().all(dep -> { - if (dep instanceof ProjectDependency - && ((ProjectDependency) dep).getDependencyProject().getPlugins().hasPlugin(PluginBuildPlugin.class)) { - project.getGradle() - .projectsEvaluated(gradle -> addPluginResources(project, ((ProjectDependency) dep).getDependencyProject())); + if (dep instanceof ProjectDependency && dep.getGroup().contains("plugin")) { + addPluginResources(project, ((ProjectDependency) dep)); } }); } - private static void addPluginResources(final Project project, final Project pluginProject) { - final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + pluginProject.getName()); - String camelProjectName = stream(pluginProject.getName().split("-")).map(t -> StringUtils.capitalize(t)) + private static void addPluginResources(final Project project, final ProjectDependency projectDependency) { + final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + projectDependency.getName()); + String camelProjectName = stream(projectDependency.getName().split("-")).map(t -> StringUtils.capitalize(t)) .collect(Collectors.joining()); String taskName = "copy" + camelProjectName + "Metadata"; + String metadataConfiguration = "resolved" + camelProjectName + "Metadata"; + Configuration pluginMetadata = project.getConfigurations().maybeCreate(metadataConfiguration); + pluginMetadata.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + 
pluginMetadata.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + DependencyHandler dependencyHandler = project.getDependencies(); + ProjectDependencyInternal pluginProject = (ProjectDependencyInternal) projectDependency; + + String path = pluginProject.getIdentityPath().getPath(); + Dependency pluginMetadataDependency = dependencyHandler.project(Map.of("path", path)); + dependencyHandler.add(metadataConfiguration, pluginMetadataDependency); project.getTasks().register(taskName, Copy.class, copy -> { copy.into(outputDir); - copy.from(pluginProject.getTasks().named("pluginProperties")); - copy.from(pluginProject.file("src/main/plugin-metadata")); + copy.from(pluginMetadata); }); Map map = Map.of("builtBy", taskName); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 548791b9496c2..32a766953f9b8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -43,6 +43,7 @@ import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; +import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.ClasspathNormalizer; import org.gradle.api.tasks.PathSensitivity; @@ -251,7 +252,7 @@ private void copyDependencies(Project project, DependencySet dependencies, Confi configuration.getDependencies() .stream() .filter(d -> d instanceof ProjectDependency) - .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath()))) + .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependencyInternal) d).getIdentityPath().getPath()))) .forEach(dependencies::add); } @@ -328,8 +329,11 @@ private Configuration createPluginConfiguration(Project project, String name, bo Collection additionalDependencies = new LinkedHashSet<>(); for (Iterator iterator = dependencies.iterator(); iterator.hasNext();) { Dependency dependency = iterator.next(); + // this logic of relying on other projects metadata should probably live in a build service if (dependency instanceof ProjectDependency projectDependency) { - Project dependencyProject = projectDependency.getDependencyProject(); + Project dependencyProject = project.project( + ((ProjectDependencyInternal) projectDependency).getIdentityPath().getPath() + ); List extendedPlugins = dependencyProject.getExtensions() .getByType(PluginPropertiesExtension.class) .getExtendedPlugins(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java index ac458a632e818..0c6a6bc26156b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java @@ -34,6 +34,7 @@ static String toArchString(Architecture architecture) { case X86_64 
-> "x64"; case AARCH64 -> "aarch64"; case X86 -> "x86"; + default -> throw new UnsupportedOperationException("Architecture " + architecture); }; } diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index dd78a707858a7..876e3136ea819 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.10.2 \ No newline at end of file +8.11.1 \ No newline at end of file diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java index 2f5b110fc59a9..c3da389fc30d4 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -39,6 +39,12 @@ public void write(byte b[], int off, int len) throws IOException { bootstrap(); delegate.write(b, off, len); } + + @Override + public void write(byte b[]) throws IOException { + bootstrap(); + delegate.write(b); + } }; } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java index b3a792b418384..42e576012c0c9 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java @@ -24,6 +24,8 @@ import org.gradle.api.Task; import org.gradle.api.Transformer; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; import org.gradle.api.file.CopySpec; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; @@ -126,9 +128,27 @@ private TaskProvider createBundleTasks(final Project project, PluginPropert // know about the plugin (used by test security code to statically initialize the plugin in unit tests) var testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test"); Map map = Map.of("builtBy", buildProperties); - testSourceSet.getOutput().dir(map, new File(project.getBuildDir(), "generated-resources")); + + File generatedResources = new File(project.getBuildDir(), "generated-resources"); + testSourceSet.getOutput().dir(map, generatedResources); testSourceSet.getResources().srcDir(pluginMetadata); + // expose the plugin properties and metadata for other plugins to use in their tests. + // See TestWithDependenciesPlugin for how this is used. 
+ project.getConfigurations().create("pluginMetadata", conf -> { + conf.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + conf.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + }); + + project.getArtifacts().add("pluginMetadata", new File(project.getBuildDir(), "generated-descriptor"), artifact -> { + artifact.builtBy(buildProperties); + }); + project.getArtifacts().add("pluginMetadata", pluginMetadata); + // getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "plugin-metadata"); var bundleSpec = createBundleSpec(project, pluginMetadata, buildProperties); extension.setBundleSpec(bundleSpec); // create the actual bundle task, which zips up all the files for the plugin diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 486c95d15c7a1..5f45b4b72974f 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -43,7 +43,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.9.1" + id "com.netflix.nebula.ospackage-base" version "11.10.0" } ['deb', 'rpm'].each { type -> @@ -195,7 +195,7 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirPermissions { - unix(02750) + unix(0750) } into('/etc') permissionGroup 'elasticsearch' @@ -208,7 +208,7 @@ def commonPackageConfig(String type, String architecture) { from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') dirPermissions { - unix(02750) + unix(0750) } setgid = true filePermissions { @@ -260,7 +260,7 @@ def commonPackageConfig(String type, String architecture) { // ========= empty dirs ========= // NOTE: these are created under packagingFiles as empty, but the permissions are set here - Closure copyEmptyDir = { path, u, g, mode -> + Closure copyEmptyDir = { path, u, g, gid, mode -> File file = new File(path) into(file.parent) { from "${packagingFiles}/${file.parent}" @@ -272,12 +272,12 @@ def commonPackageConfig(String type, String architecture) { dirPermissions { unix(mode) } - setgid (mode == 02750) + setgid(gid) } } - copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) - copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) - copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755) + copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) + copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) + copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', false, 0755) // the oss package conflicts with the default distribution and vice versa conflicts('elasticsearch-oss') diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2f465e06a662a..59b4ea0d9eb75 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -944,6 +944,11 @@ + + + + + @@ -3045,6 +3050,11 @@ + + + + + @@ -3060,6 +3070,11 @@ + + + + + @@ -3130,6 +3145,11 @@ + + + + + @@ -3438,6 +3458,11 @@ + + + + + @@ -3453,6 +3478,16 @@ + + + + + + + + + + @@ -3868,6 +3903,11 @@ + + + + + @@ -4623,6 +4663,11 @@ + + + + + diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ 
b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME From 5d4072d689d518b5c82d666dcb33d7a17529cf5b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 22 Nov 2024 12:14:48 -0800 Subject: [PATCH 190/386] Fix CCS exchange when multi cluster aliases point to same cluster (#117297) [esql] > Unexpected error from Elasticsearch: illegal_state_exception - sink exchanger for id [ruxoDDxXTGW55oIPHoCT-g:964613010] already exists. This issue occurs when two or more clusterAliases point to the same physical remote cluster. The exchange service assumes the destination is unique, which is not true in this topology. This PR addresses the problem by appending a suffix derived from a monotonically increasing counter, ensuring that different exchanges are created in such cases. Another issue arising from this behavior is that data on a remote cluster is processed multiple times, leading to incorrect results. I can work on the fix for this once we agree that this is an issue. 
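The core of the fix is small enough to restate before the diff: ComputeService gains a per-node counter and derives a fresh child session id for every exchange it opens, so two cluster aliases that resolve to the same physical cluster can no longer collide on a sink exchanger id. A minimal sketch, using the field and method exactly as they appear in the diff below:

    private final AtomicLong childSessionIdGenerator = new AtomicLong();

    private String newChildSession(String session) {
        // "sessionId" becomes "sessionId/1", "sessionId/2", and so on: unique per opened exchange
        return session + "/" + childSessionIdGenerator.incrementAndGet();
    }
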
--- docs/changelog/117297.yaml | 5 ++ .../test/AbstractMultiClustersTestCase.java | 29 ++++++++---- .../operator/exchange/ExchangeService.java | 5 ++ .../action/CrossClustersCancellationIT.java | 46 +++++++++++++++++++ .../xpack/esql/action/EsqlActionTaskIT.java | 3 +- .../xpack/esql/plugin/ComputeService.java | 22 ++++++--- 6 files changed, 93 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/117297.yaml diff --git a/docs/changelog/117297.yaml b/docs/changelog/117297.yaml new file mode 100644 index 0000000000000..4a0051bbae644 --- /dev/null +++ b/docs/changelog/117297.yaml @@ -0,0 +1,5 @@ +pr: 117297 +summary: Fix CCS exchange when multi cluster aliases point to same cluster +area: ES|QL +type: bug +issues: [] diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java index 7b18cf575f190..ea82c9d21ab89 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractMultiClustersTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.plugins.Plugin; @@ -44,6 +45,7 @@ import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; @@ -149,19 +151,23 @@ public static void stopClusters() throws IOException { } protected void disconnectFromRemoteClusters() throws Exception { - Settings.Builder settings = Settings.builder(); final Set clusterAliases = clusterGroup.clusterAliases(); for (String clusterAlias : clusterAliases) { if (clusterAlias.equals(LOCAL_CLUSTER) == false) { - settings.putNull("cluster.remote." + clusterAlias + ".seeds"); - settings.putNull("cluster.remote." + clusterAlias + ".mode"); - settings.putNull("cluster.remote." + clusterAlias + ".proxy_address"); + removeRemoteCluster(clusterAlias); } } + } + + protected void removeRemoteCluster(String clusterAlias) throws Exception { + Settings.Builder settings = Settings.builder(); + settings.putNull("cluster.remote." + clusterAlias + ".seeds"); + settings.putNull("cluster.remote." + clusterAlias + ".mode"); + settings.putNull("cluster.remote." 
+ clusterAlias + ".proxy_address"); client().admin().cluster().prepareUpdateSettings(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).setPersistentSettings(settings).get(); assertBusy(() -> { for (TransportService transportService : cluster(LOCAL_CLUSTER).getInstances(TransportService.class)) { - assertThat(transportService.getRemoteClusterService().getRegisteredRemoteClusterNames(), empty()); + assertThat(transportService.getRemoteClusterService().getRegisteredRemoteClusterNames(), not(contains(clusterAlias))); } }); } @@ -178,12 +184,17 @@ protected void configureAndConnectsToRemoteClusters() throws Exception { } protected void configureRemoteCluster(String clusterAlias, Collection seedNodes) throws Exception { - final String remoteClusterSettingPrefix = "cluster.remote." + clusterAlias + "."; - Settings.Builder settings = Settings.builder(); - final List seedAddresses = seedNodes.stream().map(node -> { + final var seedAddresses = seedNodes.stream().map(node -> { final TransportService transportService = cluster(clusterAlias).getInstance(TransportService.class, node); - return transportService.boundAddress().publishAddress().toString(); + return transportService.boundAddress().publishAddress(); }).toList(); + configureRemoteClusterWithSeedAddresses(clusterAlias, seedAddresses); + } + + protected void configureRemoteClusterWithSeedAddresses(String clusterAlias, Collection seedNodes) throws Exception { + final String remoteClusterSettingPrefix = "cluster.remote." + clusterAlias + "."; + Settings.Builder settings = Settings.builder(); + final List seedAddresses = seedNodes.stream().map(TransportAddress::toString).toList(); boolean skipUnavailable = skipUnavailableForRemoteClusters().containsKey(clusterAlias) ? skipUnavailableForRemoteClusters().get(clusterAlias) : DEFAULT_SKIP_UNAVAILABLE; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 06059944f1310..e6bae7ba385e6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -40,6 +40,7 @@ import java.io.IOException; import java.util.Map; +import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; @@ -339,6 +340,10 @@ public boolean isEmpty() { return sinks.isEmpty(); } + public Set sinkKeys() { + return sinks.keySet(); + } + @Override protected void doStart() { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index df6a1e00b0212..c426e0f528eab 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -15,6 +16,7 @@ import 
org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; @@ -27,8 +29,10 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.plugin.ComputeService; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; @@ -40,8 +44,10 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomPragmas; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -189,4 +195,44 @@ public void testCancel() throws Exception { Exception error = expectThrows(Exception.class, requestFuture::actionGet); assertThat(error.getMessage(), containsString("proxy timeout")); } + + public void testSameRemoteClusters() throws Exception { + TransportAddress address = cluster(REMOTE_CLUSTER).getInstance(TransportService.class).getLocalNode().getAddress(); + int moreClusters = between(1, 5); + for (int i = 0; i < moreClusters; i++) { + String clusterAlias = REMOTE_CLUSTER + "-" + i; + configureRemoteClusterWithSeedAddresses(clusterAlias, List.of(address)); + } + int numDocs = between(10, 100); + createRemoteIndex(numDocs); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); + request.pragmas(randomPragmas()); + ActionFuture future = client().execute(EsqlQueryAction.INSTANCE, request); + try { + try { + assertBusy(() -> { + List tasks = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(ComputeService.CLUSTER_ACTION_NAME) + .get() + .getTasks(); + assertThat(tasks, hasSize(moreClusters + 1)); + }); + } finally { + PauseFieldPlugin.allowEmitting.countDown(); + } + try (EsqlQueryResponse resp = future.actionGet(30, TimeUnit.SECONDS)) { + // TODO: This produces incorrect results because data on the remote cluster is processed multiple times. 
+ long expectedCount = numDocs * (moreClusters + 1L); + assertThat(getValuesList(resp), equalTo(List.of(List.of(expectedCount)))); + } + } finally { + for (int i = 0; i < moreClusters; i++) { + String clusterAlias = REMOTE_CLUSTER + "-" + i; + removeRemoteCluster(clusterAlias); + } + } + } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index e6db79c7d8abd..460ab0f5b8b38 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -413,7 +413,8 @@ protected void doRun() throws Exception { }); sessionId = foundTasks.get(0).taskId().toString(); assertTrue(fetchingStarted.await(1, TimeUnit.MINUTES)); - ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(sessionId); + String exchangeId = exchangeService.sinkKeys().stream().filter(s -> s.startsWith(sessionId)).findFirst().get(); + ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(exchangeId); waitedForPages = randomBoolean(); if (waitedForPages) { // do not fail exchange requests until we have some pages diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index fc4c057e52ab6..eeed811674f60 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -82,6 +82,7 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -101,6 +102,7 @@ public class ComputeService { private final EnrichLookupService enrichLookupService; private final LookupFromIndexService lookupFromIndexService; private final ClusterService clusterService; + private final AtomicLong childSessionIdGenerator = new AtomicLong(); public ComputeService( SearchService searchService, @@ -167,7 +169,7 @@ public void execute( return; } var computeContext = new ComputeContext( - sessionId, + newChildSession(sessionId), RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, @@ -330,14 +332,15 @@ private void startComputeOnDataNodes( // the new remote exchange sink, and initialize the computation on the target node via data-node-request. 
for (DataNode node : dataNodeResult.dataNodes()) { var queryPragmas = configuration.pragmas(); + var childSessionId = newChildSession(sessionId); ExchangeService.openExchange( transportService, node.connection, - sessionId, + childSessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); + var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); ActionListener computeResponseListener = computeListener.acquireCompute(clusterAlias); var dataNodeListener = ActionListener.runBefore(computeResponseListener, () -> l.onResponse(null)); @@ -345,7 +348,7 @@ private void startComputeOnDataNodes( node.connection, DATA_ACTION_NAME, new DataNodeRequest( - sessionId, + childSessionId, configuration, clusterAlias, node.shardIds, @@ -378,17 +381,18 @@ private void startComputeOnRemoteClusters( var linkExchangeListeners = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); try (RefCountingListener refs = new RefCountingListener(linkExchangeListeners)) { for (RemoteCluster cluster : clusters) { + final var childSessionId = newChildSession(sessionId); ExchangeService.openExchange( transportService, cluster.connection, - sessionId, + childSessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(rootTask, sessionId, transportService, cluster.connection); + var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, cluster.originalIndices); - var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, sessionId, configuration, remotePlan); + var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); var clusterListener = ActionListener.runBefore( computeListener.acquireCompute(cluster.clusterAlias()), () -> l.onResponse(null) @@ -912,4 +916,8 @@ public List searchExecutionContexts() { return searchContexts.stream().map(ctx -> ctx.getSearchExecutionContext()).toList(); } } + + private String newChildSession(String session) { + return session + "/" + childSessionIdGenerator.incrementAndGet(); + } } From b5c6d927c19993851eda525ac991fa17803e8843 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 22 Nov 2024 21:30:54 +0100 Subject: [PATCH 191/386] Revert "[Build] Update Gradle wrapper to 8.11.1 (#115886)" This reverts commit 8f943a6a7cc948c7ae0c8248c8f53383b01e537d. breaks serverless build. 
needs further investigation --- .../gradle/wrapper/gradle-wrapper.properties | 4 +- .../internal/ElasticsearchJavadocPlugin.java | 8 +--- .../test/TestWithDependenciesPlugin.java | 35 +++++---------- .../test/rest/RestTestBasePlugin.java | 8 +--- .../AbstractCustomJavaToolchainResolver.java | 1 - .../src/main/resources/minimumGradleVersion | 2 +- .../gradle/LazyFileOutputStream.java | 6 --- .../gradle/plugin/BasePluginBuildPlugin.java | 22 +-------- distribution/packages/build.gradle | 16 +++---- gradle/verification-metadata.xml | 45 ------------------- gradle/wrapper/gradle-wrapper.properties | 4 +- .../gradle/wrapper/gradle-wrapper.properties | 4 +- 12 files changed, 30 insertions(+), 125 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d1..6acc1431eaec1 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java index dbd8181624f65..42a44edd7f9a5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java @@ -18,7 +18,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; -import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.plugins.BasePluginExtension; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.tasks.javadoc.Javadoc; @@ -83,15 +82,12 @@ private void configureJavadocForConfiguration(Project project, boolean shadow, C .sorted(Comparator.comparing(Dependency::getGroup)) .filter(d -> d instanceof ProjectDependency) .map(d -> (ProjectDependency) d) + .filter(p -> p.getDependencyProject() != null) .forEach(projectDependency -> configureDependency(project, shadow, projectDependency)); } private void configureDependency(Project project, boolean shadowed, ProjectDependency dep) { - // we should use variant aware dependency management to resolve artifacts required for javadoc here - Project upstreamProject = project.project(((ProjectDependencyInternal) dep).getIdentityPath().getPath()); - if (upstreamProject == null) { - return; - } + var upstreamProject = dep.getDependencyProject(); if (shadowed) { /* * Include the source of shadowed upstream projects so we don't diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java index e24b1afa7747b..487fe012a5941 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java @@ -10,15 +10,11 @@ package org.elasticsearch.gradle.internal.test; import org.apache.commons.lang.StringUtils; +import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; -import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; -import org.gradle.api.artifacts.dsl.DependencyHandler; -import org.gradle.api.attributes.Attribute; -import org.gradle.api.attributes.LibraryElements; -import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSetContainer; @@ -49,34 +45,23 @@ public void apply(final Project project) { Configuration testImplementationConfig = project.getConfigurations().getByName("testImplementation"); testImplementationConfig.getDependencies().all(dep -> { - if (dep instanceof ProjectDependency && dep.getGroup().contains("plugin")) { - addPluginResources(project, ((ProjectDependency) dep)); + if (dep instanceof ProjectDependency + && ((ProjectDependency) dep).getDependencyProject().getPlugins().hasPlugin(PluginBuildPlugin.class)) { + project.getGradle() + .projectsEvaluated(gradle -> addPluginResources(project, ((ProjectDependency) dep).getDependencyProject())); } }); } - private static void addPluginResources(final Project project, final ProjectDependency projectDependency) { - final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + projectDependency.getName()); - String camelProjectName = stream(projectDependency.getName().split("-")).map(t -> StringUtils.capitalize(t)) + private static void addPluginResources(final Project project, final Project pluginProject) { + final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + pluginProject.getName()); + String camelProjectName = stream(pluginProject.getName().split("-")).map(t -> StringUtils.capitalize(t)) .collect(Collectors.joining()); String taskName = "copy" + camelProjectName + "Metadata"; - String metadataConfiguration = "resolved" + camelProjectName + "Metadata"; - Configuration pluginMetadata = project.getConfigurations().maybeCreate(metadataConfiguration); - pluginMetadata.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); - pluginMetadata.getAttributes() - .attribute( - LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, - project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) - ); - DependencyHandler dependencyHandler = project.getDependencies(); - ProjectDependencyInternal pluginProject = (ProjectDependencyInternal) projectDependency; - - String path = pluginProject.getIdentityPath().getPath(); - Dependency pluginMetadataDependency = dependencyHandler.project(Map.of("path", path)); - dependencyHandler.add(metadataConfiguration, pluginMetadataDependency); project.getTasks().register(taskName, Copy.class, copy -> { copy.into(outputDir); - copy.from(pluginMetadata); + copy.from(pluginProject.getTasks().named("pluginProperties")); + copy.from(pluginProject.file("src/main/plugin-metadata")); }); Map map = Map.of("builtBy", taskName); diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 32a766953f9b8..548791b9496c2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -43,7 +43,6 @@ import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; -import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.ClasspathNormalizer; import org.gradle.api.tasks.PathSensitivity; @@ -252,7 +251,7 @@ private void copyDependencies(Project project, DependencySet dependencies, Confi configuration.getDependencies() .stream() .filter(d -> d instanceof ProjectDependency) - .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependencyInternal) d).getIdentityPath().getPath()))) + .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath()))) .forEach(dependencies::add); } @@ -329,11 +328,8 @@ private Configuration createPluginConfiguration(Project project, String name, bo Collection additionalDependencies = new LinkedHashSet<>(); for (Iterator iterator = dependencies.iterator(); iterator.hasNext();) { Dependency dependency = iterator.next(); - // this logic of relying on other projects metadata should probably live in a build service if (dependency instanceof ProjectDependency projectDependency) { - Project dependencyProject = project.project( - ((ProjectDependencyInternal) projectDependency).getIdentityPath().getPath() - ); + Project dependencyProject = projectDependency.getDependencyProject(); List extendedPlugins = dependencyProject.getExtensions() .getByType(PluginPropertiesExtension.class) .getExtendedPlugins(); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java index 0c6a6bc26156b..ac458a632e818 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java @@ -34,7 +34,6 @@ static String toArchString(Architecture architecture) { case X86_64 -> "x64"; case AARCH64 -> "aarch64"; case X86 -> "x86"; - default -> throw new UnsupportedOperationException("Architecture " + architecture); }; } diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index 876e3136ea819..dd78a707858a7 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.11.1 \ No newline at end of file +8.10.2 \ No newline at end of file diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java index c3da389fc30d4..2f5b110fc59a9 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java +++ 
b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -39,12 +39,6 @@ public void write(byte b[], int off, int len) throws IOException { bootstrap(); delegate.write(b, off, len); } - - @Override - public void write(byte b[]) throws IOException { - bootstrap(); - delegate.write(b); - } }; } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java index 42e576012c0c9..b3a792b418384 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java @@ -24,8 +24,6 @@ import org.gradle.api.Task; import org.gradle.api.Transformer; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.attributes.Attribute; -import org.gradle.api.attributes.LibraryElements; import org.gradle.api.file.CopySpec; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; @@ -128,27 +126,9 @@ private TaskProvider createBundleTasks(final Project project, PluginPropert // know about the plugin (used by test security code to statically initialize the plugin in unit tests) var testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test"); Map map = Map.of("builtBy", buildProperties); - - File generatedResources = new File(project.getBuildDir(), "generated-resources"); - testSourceSet.getOutput().dir(map, generatedResources); + testSourceSet.getOutput().dir(map, new File(project.getBuildDir(), "generated-resources")); testSourceSet.getResources().srcDir(pluginMetadata); - // expose the plugin properties and metadata for other plugins to use in their tests. - // See TestWithDependenciesPlugin for how this is used. 
- project.getConfigurations().create("pluginMetadata", conf -> { - conf.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); - conf.getAttributes() - .attribute( - LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, - project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) - ); - }); - - project.getArtifacts().add("pluginMetadata", new File(project.getBuildDir(), "generated-descriptor"), artifact -> { - artifact.builtBy(buildProperties); - }); - project.getArtifacts().add("pluginMetadata", pluginMetadata); - // getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "plugin-metadata"); var bundleSpec = createBundleSpec(project, pluginMetadata, buildProperties); extension.setBundleSpec(bundleSpec); // create the actual bundle task, which zips up all the files for the plugin diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 5f45b4b72974f..486c95d15c7a1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -43,7 +43,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.10.0" + id "com.netflix.nebula.ospackage-base" version "11.9.1" } ['deb', 'rpm'].each { type -> @@ -195,7 +195,7 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirPermissions { - unix(0750) + unix(02750) } into('/etc') permissionGroup 'elasticsearch' @@ -208,7 +208,7 @@ def commonPackageConfig(String type, String architecture) { from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') dirPermissions { - unix(0750) + unix(02750) } setgid = true filePermissions { @@ -260,7 +260,7 @@ def commonPackageConfig(String type, String architecture) { // ========= empty dirs ========= // NOTE: these are created under packagingFiles as empty, but the permissions are set here - Closure copyEmptyDir = { path, u, g, gid, mode -> + Closure copyEmptyDir = { path, u, g, mode -> File file = new File(path) into(file.parent) { from "${packagingFiles}/${file.parent}" @@ -272,12 +272,12 @@ def commonPackageConfig(String type, String architecture) { dirPermissions { unix(mode) } - setgid(gid) + setgid (mode == 02750) } } - copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) - copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750) - copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', false, 0755) + copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) + copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', 02750) + copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755) // the oss package conflicts with the default distribution and vice versa conflicts('elasticsearch-oss') diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 59b4ea0d9eb75..2f465e06a662a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -944,11 +944,6 @@ - - - - - @@ -3050,11 +3045,6 @@ - - - - - @@ -3070,11 +3060,6 @@ - - - - - @@ -3145,11 +3130,6 @@ - - - - - @@ -3458,11 +3438,6 @@ - - - - - @@ -3478,16 +3453,6 @@ - - - - - - - - - - @@ -3903,11 +3868,6 @@ - - - - - @@ -4663,11 +4623,6 @@ - - - - - diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d1..6acc1431eaec1 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ 
b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 22286c90de3d1..6acc1431eaec1 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip +distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME From 10e6360cd0f1925c3878928d94f4b6855ffe73df Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Fri, 22 Nov 2024 13:51:08 -0700 Subject: [PATCH 192/386] Unmute test - should be fixed now (#117382) --- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index c4af8bf1c0d21..26c7ce6ccf01f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,7 +1,4 @@ tests: -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/esql/esql-async-query-api/line_17} - issue: https://github.com/elastic/elasticsearch/issues/109260 - class: "org.elasticsearch.client.RestClientSingleHostIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/102717" method: "testRequestResetAndAbort" From 129e17455121838a508b5dba6d6273ba0a57e017 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 22 Nov 2024 13:24:50 -0800 Subject: [PATCH 193/386] Fix entitlement tools to build (#117351) This commit adjusts the common lib of entitlement tools to use elasticsearch.build so that it gets java version configuration automatically. Additionally the mrjar plugin is removed from the core lib since it is not used there. --- libs/core/build.gradle | 1 - libs/entitlement/tools/common/build.gradle | 9 ++++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/libs/core/build.gradle b/libs/core/build.gradle index e24417e09a53d..99c22620e7354 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -8,7 +8,6 @@ */ apply plugin: 'elasticsearch.publish' -apply plugin: 'elasticsearch.mrjar' dependencies { // This dependency is used only by :libs:core for null-checking interop with other tools diff --git a/libs/entitlement/tools/common/build.gradle b/libs/entitlement/tools/common/build.gradle index 3373a8f747430..89772b4132c5f 100644 --- a/libs/entitlement/tools/common/build.gradle +++ b/libs/entitlement/tools/common/build.gradle @@ -7,9 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -plugins { - id 'java' -} - -group = 'org.elasticsearch.entitlement.tools' +apply plugin: 'elasticsearch.build' +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} From e4d05120fc2ba15e3e73f51d770e03eb28dc6351 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 22 Nov 2024 13:42:21 -0800 Subject: [PATCH 194/386] Fix leftover exchange in ManyShardsIT (#117309) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In the ManyShardsIT#testRejection test, we intercept exchange requests and fail them with EsRejectedExecutionException, verifying that we return a 400 response instead of a 500. The issue with the current test is that if a data-node request never arrives because the whole request was canceled after the exchange request failed—the leftover exchange sink remains until it times out, which defaults to 5 minutes. This change adjusts the test to use a single data node and ensures exchange requests are only failed after the data-node request has arrived. Closes #112406 Closes #112418 Closes #112424 --- muted-tests.yml | 6 -- .../xpack/esql/action/ManyShardsIT.java | 97 ++++++++++++++----- 2 files changed, 72 insertions(+), 31 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 26c7ce6ccf01f..cd656412b5025 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -11,12 +11,6 @@ tests: - class: org.elasticsearch.smoketest.WatcherYamlRestIT method: test {p0=watcher/usage/10_basic/Test watcher usage stats output} issue: https://github.com/elastic/elasticsearch/issues/112189 -- class: org.elasticsearch.xpack.esql.action.ManyShardsIT - method: testRejection - issue: https://github.com/elastic/elasticsearch/issues/112406 -- class: org.elasticsearch.xpack.esql.action.ManyShardsIT - method: testConcurrentQueries - issue: https://github.com/elastic/elasticsearch/issues/112424 - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/111497 - class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java index 1ce92ded8acc6..c52e1b538972b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java @@ -14,9 +14,13 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.MockSearchService; @@ -26,6 +30,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.plugin.ComputeService; 
import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.hamcrest.Matchers; import org.junit.Before; @@ -56,6 +61,18 @@ protected Collection> getMockPlugins() { return plugins; } + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), InternalExchangePlugin.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(3000, 5000))) + .build(); + } + @Before public void setupIndices() { int numIndices = between(10, 20); @@ -113,32 +130,64 @@ public void testConcurrentQueries() throws Exception { } public void testRejection() throws Exception { - String[] nodes = internalCluster().getNodeNames(); - for (String node : nodes) { - MockTransportService ts = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - ts.addRequestHandlingBehavior(ExchangeService.EXCHANGE_ACTION_NAME, (handler, request, channel, task) -> { - handler.messageReceived(request, new TransportChannel() { - @Override - public String getProfileName() { - return channel.getProfileName(); - } - - @Override - public void sendResponse(TransportResponse response) { - channel.sendResponse(new RemoteTransportException("simulated", new EsRejectedExecutionException("test queue"))); - } - - @Override - public void sendResponse(Exception exception) { - channel.sendResponse(exception); - } - }, task); + DiscoveryNode dataNode = randomFrom(internalCluster().clusterService().state().nodes().getDataNodes().values()); + String indexName = "single-node-index"; + client().admin() + .indices() + .prepareCreate(indexName) + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.routing.allocation.require._name", dataNode.getName()) + ) + .setMapping("user", "type=keyword", "tags", "type=keyword") + .get(); + client().prepareIndex(indexName) + .setSource("user", "u1", "tags", "lucene") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + MockTransportService ts = (MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getName()); + CountDownLatch dataNodeRequestLatch = new CountDownLatch(1); + ts.addRequestHandlingBehavior(ComputeService.DATA_ACTION_NAME, (handler, request, channel, task) -> { + handler.messageReceived(request, channel, task); + dataNodeRequestLatch.countDown(); + }); + + ts.addRequestHandlingBehavior(ExchangeService.EXCHANGE_ACTION_NAME, (handler, request, channel, task) -> { + ts.getThreadPool().generic().execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + channel.sendResponse(e); + } + + @Override + protected void doRun() throws Exception { + assertTrue(dataNodeRequestLatch.await(30, TimeUnit.SECONDS)); + handler.messageReceived(request, new TransportChannel() { + @Override + public String getProfileName() { + return channel.getProfileName(); + } + + @Override + public void sendResponse(TransportResponse response) { + channel.sendResponse(new RemoteTransportException("simulated", new EsRejectedExecutionException("test queue"))); + } + + @Override + public void sendResponse(Exception exception) { + channel.sendResponse(exception); + } + }, task); + } }); - } + }); + try { AtomicReference failure = new AtomicReference<>(); EsqlQueryRequest request = new EsqlQueryRequest(); - request.query("from test-* | stats count(user) by tags"); + 
request.query("from single-node-index | stats count(user) by tags"); request.acceptedPragmaRisks(true); request.pragmas(randomPragmas()); CountDownLatch queryLatch = new CountDownLatch(1); @@ -151,9 +200,7 @@ public void sendResponse(Exception exception) { assertThat(ExceptionsHelper.status(failure.get()), equalTo(RestStatus.TOO_MANY_REQUESTS)); assertThat(failure.get().getMessage(), equalTo("test queue")); } finally { - for (String node : nodes) { - ((MockTransportService) internalCluster().getInstance(TransportService.class, node)).clearAllRules(); - } + ts.clearAllRules(); } } From f13c1ee86775467ecbd48bd4bde5cd5e87dcf9ea Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Fri, 22 Nov 2024 15:57:36 -0600 Subject: [PATCH 195/386] bump hadoop hdfs to 3.4.1 (#117263) This commit bump hadoop hdfs to 3.4.1 (for repository-hdfs) . --- gradle/verification-metadata.xml | 33 +++++++++++-------- plugins/repository-hdfs/build.gradle | 16 +++++---- .../hdfs/HdfsSecurityContext.java | 3 +- 3 files changed, 30 insertions(+), 22 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2f465e06a662a..4cfd329ba728e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1201,6 +1201,11 @@ + + + + + @@ -1256,16 +1261,16 @@ - - - - - + + + + + @@ -2372,14 +2377,14 @@ - - - + + + - - - + + + @@ -2414,9 +2419,9 @@ - - - + + + diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 6c2dc56b17eb2..4da7c24de80f1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -19,7 +19,7 @@ esplugin { } versions << [ - 'hadoop': '3.3.3' + 'hadoop': '3.4.1' ] configurations { @@ -41,9 +41,9 @@ dependencies { api "com.google.protobuf:protobuf-java:${versions.protobuf}" api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" - api 'commons-cli:commons-cli:1.2' + api 'commons-cli:commons-cli:1.5.0' api "commons-codec:commons-codec:${versions.commonscodec}" - api 'commons-io:commons-io:2.8.0' + api 'commons-io:commons-io:2.16.1' api 'org.apache.commons:commons-lang3:3.11' api 'javax.servlet:javax.servlet-api:3.1.0' api "org.slf4j:slf4j-api:${versions.slf4j}" @@ -57,14 +57,14 @@ dependencies { javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" - javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.5.0" javaRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" yamlRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) yamlRestTestImplementation project(':test:fixtures:krb5kdc-fixture') yamlRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" yamlRestTestRuntimeOnly "com.google.guava:guava:16.0.1" - yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.5.0" yamlRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2') @@ -177,7 +177,6 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', 
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', @@ -188,6 +187,9 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.MessageSchema', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android32MemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor' ) } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java index 98aa9951172ba..ce6acd79a0bb9 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsSecurityContext.java @@ -47,7 +47,8 @@ class HdfsSecurityContext { // 2) allow hadoop to add credentials to our Subject new AuthPermission("modifyPrivateCredentials"), // 3) RPC Engine requires this for re-establishing pooled connections over the lifetime of the client - new PrivateCredentialPermission("org.apache.hadoop.security.Credentials * \"*\"", "read") }; + new PrivateCredentialPermission("org.apache.hadoop.security.Credentials * \"*\"", "read"), + new RuntimePermission("getClassLoader") }; // If Security is enabled, we need all the following elevated permissions: KERBEROS_AUTH_PERMISSIONS = new Permission[] { From de1390708a74387d4a6b141e20cca1ce6d73b325 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 23 Nov 2024 09:32:36 +1100 Subject: [PATCH 196/386] Mute org.elasticsearch.xpack.esql.action.EsqlActionTaskIT testCancelRequestWhenFailingFetchingPages #117397 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index cd656412b5025..d086d41a15edd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -232,6 +232,9 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testInferDeploysDefaultElser issue: https://github.com/elastic/elasticsearch/issues/114913 +- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT + method: testCancelRequestWhenFailingFetchingPages + issue: https://github.com/elastic/elasticsearch/issues/117397 # Examples: # From 7e801e0410a9515cb7168f8a5141c0b4713ea443 Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: Fri, 22 Nov 2024 12:33:46 -1000 Subject: [PATCH 197/386] [ES|QL] Add a standard deviation function (#116531) Uses Welford's online algorithm, as well as the parallel version, to calculate standard deviation. 
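For reference, the two update rules involved can be sketched as follows. This is a
minimal illustration rather than the shipped WelfordAlgorithm/StdDevStates classes,
and the final evaluation step (population rather than sample standard deviation) is
an assumption of the sketch:

    // Welford's online algorithm plus the parallel merge rule (Chan et al.).
    final class WelfordSketch {
        private double mean; // running mean of the values seen so far
        private double m2;   // running sum of squared deviations from the mean
        private long count;  // number of values seen so far

        // Online update: fold one value into the state in O(1).
        void add(double value) {
            count++;
            double delta = value - mean;
            mean += delta / count;
            m2 += delta * (value - mean);
        }

        // Parallel merge: combine a partial (mean, m2, count) state computed
        // elsewhere, e.g. an intermediate state from another node or driver.
        void combine(double otherMean, double otherM2, long otherCount) {
            if (otherCount == 0) {
                return;
            }
            long total = count + otherCount;
            double delta = otherMean - mean;
            m2 += otherM2 + delta * delta * ((double) count * otherCount) / total;
            mean += delta * otherCount / total;
            count = total;
        }

        double stdDev() {
            return count == 0 ? Double.NaN : Math.sqrt(m2 / count);
        }
    }

The (mean, m2, count) triple is exactly the intermediate state declared via
@IntermediateState on the generated aggregators below, which is what allows partial
results to be serialized between drivers and merged with the parallel rule.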
--- docs/changelog/116531.yaml | 5 + .../functions/aggregation-functions.asciidoc | 2 + .../functions/description/std_dev.asciidoc | 5 + .../esql/functions/examples/std_dev.asciidoc | 22 ++ .../functions/kibana/definition/std_dev.json | 50 ++++ .../esql/functions/kibana/docs/std_dev.md | 11 + .../esql/functions/layout/std_dev.asciidoc | 15 ++ .../functions/parameters/std_dev.asciidoc | 6 + .../esql/functions/signature/std_dev.svg | 1 + .../esql/functions/types/std_dev.asciidoc | 11 + x-pack/plugin/esql/compute/build.gradle | 21 ++ .../aggregation/StdDevDoubleAggregator.java | 66 +++++ .../aggregation/StdDevFloatAggregator.java | 66 +++++ .../aggregation/StdDevIntAggregator.java | 66 +++++ .../aggregation/StdDevLongAggregator.java | 66 +++++ .../StdDevDoubleAggregatorFunction.java | 178 ++++++++++++++ ...tdDevDoubleAggregatorFunctionSupplier.java | 38 +++ ...tdDevDoubleGroupingAggregatorFunction.java | 223 +++++++++++++++++ .../StdDevFloatAggregatorFunction.java | 180 ++++++++++++++ ...StdDevFloatAggregatorFunctionSupplier.java | 38 +++ ...StdDevFloatGroupingAggregatorFunction.java | 225 ++++++++++++++++++ .../StdDevIntAggregatorFunction.java | 180 ++++++++++++++ .../StdDevIntAggregatorFunctionSupplier.java | 38 +++ .../StdDevIntGroupingAggregatorFunction.java | 223 +++++++++++++++++ .../StdDevLongAggregatorFunction.java | 178 ++++++++++++++ .../StdDevLongAggregatorFunctionSupplier.java | 38 +++ .../StdDevLongGroupingAggregatorFunction.java | 223 +++++++++++++++++ .../compute/aggregation/StdDevStates.java | 211 ++++++++++++++++ .../compute/aggregation/WelfordAlgorithm.java | 79 ++++++ .../aggregation/X-StdDevAggregator.java.st | 66 +++++ .../src/main/resources/stats.csv-spec | 140 +++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 + .../function/EsqlFunctionRegistry.java | 2 + .../aggregate/AggregateWritables.java | 1 + .../expression/function/aggregate/StdDev.java | 112 +++++++++ .../xpack/esql/planner/AggregateMapper.java | 7 +- .../function/aggregate/StdDevTests.java | 73 ++++++ .../rest-api-spec/test/esql/60_usage.yml | 4 +- 38 files changed, 2869 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/116531.yaml create mode 100644 docs/reference/esql/functions/description/std_dev.asciidoc create mode 100644 docs/reference/esql/functions/examples/std_dev.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/std_dev.json create mode 100644 docs/reference/esql/functions/kibana/docs/std_dev.md create mode 100644 docs/reference/esql/functions/layout/std_dev.asciidoc create mode 100644 docs/reference/esql/functions/parameters/std_dev.asciidoc create mode 100644 docs/reference/esql/functions/signature/std_dev.svg create mode 100644 docs/reference/esql/functions/types/std_dev.asciidoc create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java diff --git a/docs/changelog/116531.yaml b/docs/changelog/116531.yaml new file mode 100644 index 0000000000000..908bbff487973 --- /dev/null +++ b/docs/changelog/116531.yaml @@ -0,0 +1,5 @@ +pr: 116531 +summary: "Add a standard deviation aggregating function: STD_DEV" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 3a27e1944a684..c2c2508ad5de2 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -17,6 +17,7 @@ The <> command supports these aggregate functions: * <> * <> * experimental:[] <> +* <> * <> * <> * <> @@ -32,6 +33,7 @@ include::layout/median_absolute_deviation.asciidoc[] include::layout/min.asciidoc[] include::layout/percentile.asciidoc[] include::layout/st_centroid_agg.asciidoc[] +include::layout/std_dev.asciidoc[] include::layout/sum.asciidoc[] include::layout/top.asciidoc[] include::layout/values.asciidoc[] diff --git a/docs/reference/esql/functions/description/std_dev.asciidoc b/docs/reference/esql/functions/description/std_dev.asciidoc new file mode 100644 index 0000000000000..b78ddd7dbba13 --- /dev/null +++ b/docs/reference/esql/functions/description/std_dev.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +The standard deviation of a numeric field. diff --git a/docs/reference/esql/functions/examples/std_dev.asciidoc b/docs/reference/esql/functions/examples/std_dev.asciidoc new file mode 100644 index 0000000000000..2e6dc996aae9a --- /dev/null +++ b/docs/reference/esql/functions/examples/std_dev.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=stdev] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=stdev-result] +|=== +The expression can use inline functions. For example, to calculate the standard deviation of each employee's maximum salary changes, first use `MV_MAX` on each row, and then use `STD_DEV` on the result +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=docsStatsStdDevNestedExpression-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/std_dev.json b/docs/reference/esql/functions/kibana/definition/std_dev.json new file mode 100644 index 0000000000000..f31d3345421d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/std_dev.json @@ -0,0 +1,50 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "std_dev", + "description" : "The standard deviation of a numeric field.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "FROM employees\n| STATS STD_DEV(height)", + "FROM employees\n| STATS stddev_salary_change = STD_DEV(MV_MAX(salary_change))" + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/std_dev.md b/docs/reference/esql/functions/kibana/docs/std_dev.md new file mode 100644 index 0000000000000..a6afca7b8f6b3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/std_dev.md @@ -0,0 +1,11 @@ + + +### STD_DEV +The standard deviation of a numeric field. + +``` +FROM employees +| STATS STD_DEV(height) +``` diff --git a/docs/reference/esql/functions/layout/std_dev.asciidoc b/docs/reference/esql/functions/layout/std_dev.asciidoc new file mode 100644 index 0000000000000..a7a34b1331d17 --- /dev/null +++ b/docs/reference/esql/functions/layout/std_dev.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
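// Illustration only, not part of the generated file: STD_DEV composes with grouping
// like any other aggregate. Assuming the `employees` test data set used throughout
// these docs, a grouped query would look like:
//
//   FROM employees
//   | STATS stddev_height = STD_DEV(height) BY gender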
+ +[discrete] +[[esql-std_dev]] +=== `STD_DEV` + +*Syntax* + +[.text-center] +image::esql/functions/signature/std_dev.svg[Embedded,opts=inline] + +include::../parameters/std_dev.asciidoc[] +include::../description/std_dev.asciidoc[] +include::../types/std_dev.asciidoc[] +include::../examples/std_dev.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/std_dev.asciidoc b/docs/reference/esql/functions/parameters/std_dev.asciidoc new file mode 100644 index 0000000000000..91c56709d182a --- /dev/null +++ b/docs/reference/esql/functions/parameters/std_dev.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: + diff --git a/docs/reference/esql/functions/signature/std_dev.svg b/docs/reference/esql/functions/signature/std_dev.svg new file mode 100644 index 0000000000000..606d285154f59 --- /dev/null +++ b/docs/reference/esql/functions/signature/std_dev.svg @@ -0,0 +1 @@ +STD_DEV(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/std_dev.asciidoc b/docs/reference/esql/functions/types/std_dev.asciidoc new file mode 100644 index 0000000000000..273dae4af76c2 --- /dev/null +++ b/docs/reference/esql/functions/types/std_dev.asciidoc @@ -0,0 +1,11 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +number | result +double | double +integer | double +long | double +|=== diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 3deac4925c951..609c778df5929 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -608,6 +608,27 @@ tasks.named('stringTemplates').configure { it.outputFile = "org/elasticsearch/compute/aggregation/RateDoubleAggregator.java" } + File stdDevAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevLongAggregator.java" + } + template { + it.properties = floatProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = stdDevAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java" + } File topAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st") template { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java new file mode 100644 index 0000000000000..3a1185d34fa23 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevDoubleAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for double. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevDoubleAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, double value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, double value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java new file mode 100644 index 0000000000000..51c22e7e29c1e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevFloatAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for float. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevFloatAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, float value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, float value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java new file mode 100644 index 0000000000000..24eae35cb3249 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevIntAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for int. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevIntAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, int value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, int value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java new file mode 100644 index 0000000000000..888ace30a0c8e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/StdDevLongAggregator.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for long. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. 
+ */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDevLongAggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, long value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, long value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..dd6cc89401a99 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunction.java @@ -0,0 +1,178 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevDoubleAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevDoubleAggregatorFunction(driverContext, channels, StdDevDoubleAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if 
(countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevDoubleAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..313eed4ae97ae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevDoubleAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..da49c254e353a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevDoubleGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
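// Illustration comment, not generator output: the three intermediate blocks this
// grouping aggregator reads back (mean, m2, count) are the Welford state sketched in
// the commit message above; addIntermediateInput() re-merges them into each group's
// state via StdDevDoubleAggregator.combineIntermediate().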
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevDoubleGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevDoubleGroupingAggregatorFunction(channels, StdDevDoubleAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < 
valuesEnd; v++) { + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevDoubleAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + 
StdDevDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java new file mode 100644 index 0000000000000..bf8c4854f6b93 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunction.java @@ -0,0 +1,180 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevFloatAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevFloatAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevFloatAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevFloatAggregatorFunction(driverContext, channels, StdDevFloatAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(FloatVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevFloatAggregator.combine(state, vector.getFloat(i)); + } + } + + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevFloatAggregator.combine(state, vector.getFloat(i)); + } + } + + private void addRawBlock(FloatBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + 
return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevFloatAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevFloatAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..25dfa54895eda --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevFloatAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevFloatAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevFloatAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevFloatGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of floats"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bf994aaf2840e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevFloatGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.FloatVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevFloatAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevFloatGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevFloatGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevFloatGroupingAggregatorFunction(channels, StdDevFloatAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + FloatBlock valuesBlock = page.getBlock(channels.get(0)); + FloatVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, FloatBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart 
+ values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevFloatAggregator.combine(state, groupId, values.getFloat(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, FloatVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, FloatBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevFloatAggregator.combine(state, groupId, values.getFloat(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, FloatVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevFloatAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevFloatGroupingAggregatorFunction) input).state; + 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevFloatAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevFloatAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java new file mode 100644 index 0000000000000..4a5585a7dd454 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunction.java @@ -0,0 +1,180 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevIntAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevIntAggregatorFunction(driverContext, channels, StdDevIntAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevIntAggregator.combine(state, block.getInt(i)); + } + } + } + + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = 
((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevIntAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..5a762d6606a25 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevIntAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevIntAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..139cc24d3541f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevIntGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevIntGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevIntGroupingAggregatorFunction(channels, StdDevIntAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
StdDevIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevIntAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + 
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java new file mode 100644 index 0000000000000..b5ed31116a90c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunction.java @@ -0,0 +1,178 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class StdDevLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final StdDevStates.SingleState state; + + private final List channels; + + public StdDevLongAggregatorFunction(DriverContext driverContext, List channels, + StdDevStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static StdDevLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new StdDevLongAggregatorFunction(driverContext, channels, StdDevLongAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.allFalse()) { + // Entire page masked away + return; + } + if (mask.allTrue()) { + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector, mask); + } else { + addRawBlock(block, mask); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + StdDevLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + StdDevLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevLongAggregator.combine(state, block.getLong(i)); + } + } + } + + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + StdDevLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + assert mean.getPositionCount() == 1; + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + assert m2.getPositionCount() == 1; + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + 
LongVector count = ((LongBlock) countUncast).asVector(); + assert count.getPositionCount() == 1; + StdDevLongAggregator.combineIntermediate(state, mean.getDouble(0), m2.getDouble(0), count.getLong(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = StdDevLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..09b996201ef16 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public StdDevLongAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public StdDevLongAggregatorFunction aggregator(DriverContext driverContext) { + return StdDevLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public StdDevLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return StdDevLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "std_dev of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..da7a5f4bdea0d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/StdDevLongGroupingAggregatorFunction.java @@ -0,0 +1,223 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link StdDevLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class StdDevLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("mean", ElementType.DOUBLE), + new IntermediateStateDesc("m2", ElementType.DOUBLE), + new IntermediateStateDesc("count", ElementType.LONG) ); + + private final StdDevStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public StdDevLongGroupingAggregatorFunction(List channels, + StdDevStates.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static StdDevLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new StdDevLongGroupingAggregatorFunction(channels, StdDevLongAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void close() { + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void close() { + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
StdDevLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + StdDevLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = groups.getInt(g); + StdDevLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) { + state.enableGroupIdTracking(seenGroupIds); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block meanUncast = page.getBlock(channels.get(0)); + if (meanUncast.areAllValuesNull()) { + return; + } + DoubleVector mean = ((DoubleBlock) meanUncast).asVector(); + Block m2Uncast = page.getBlock(channels.get(1)); + if (m2Uncast.areAllValuesNull()) { + return; + } + DoubleVector m2 = ((DoubleBlock) m2Uncast).asVector(); + Block countUncast = page.getBlock(channels.get(2)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); + assert mean.getPositionCount() == m2.getPositionCount() && mean.getPositionCount() == count.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = groups.getInt(groupPosition); + StdDevLongAggregator.combineIntermediate(state, groupId, mean.getDouble(groupPosition + positionOffset), m2.getDouble(groupPosition + positionOffset), count.getLong(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + StdDevStates.GroupingState inState = ((StdDevLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + StdDevLongAggregator.combineStates(state, groupId, inState, position); + } + + 
@Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = StdDevLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java new file mode 100644 index 0000000000000..bff8903fd3bec --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/StdDevStates.java @@ -0,0 +1,211 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +public final class StdDevStates { + + private StdDevStates() {} + + static final class SingleState implements AggregatorState { + + private final WelfordAlgorithm welfordAlgorithm; + + SingleState() { + this(0, 0, 0); + } + + SingleState(double mean, double m2, long count) { + this.welfordAlgorithm = new WelfordAlgorithm(mean, m2, count); + } + + public void add(long value) { + welfordAlgorithm.add(value); + } + + public void add(double value) { + welfordAlgorithm.add(value); + } + + public void add(int value) { + welfordAlgorithm.add(value); + } + + public void combine(double mean, double m2, long count) { + welfordAlgorithm.add(mean, m2, count); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + assert blocks.length >= offset + 3; + BlockFactory blockFactory = driverContext.blockFactory(); + blocks[offset + 0] = blockFactory.newConstantDoubleBlockWith(mean(), 1); + blocks[offset + 1] = blockFactory.newConstantDoubleBlockWith(m2(), 1); + blocks[offset + 2] = blockFactory.newConstantLongBlockWith(count(), 1); + } + + @Override + public void close() {} + + public double mean() { + return welfordAlgorithm.mean(); + } + + public double m2() { + return welfordAlgorithm.m2(); + } + + public long count() { + return welfordAlgorithm.count(); + } + + public double evaluateFinal() { + return welfordAlgorithm.evaluate(); + } + + public Block evaluateFinal(DriverContext driverContext) { + final long count = count(); + final double m2 = m2(); + if (count == 0 || Double.isFinite(m2) == false) { + return driverContext.blockFactory().newConstantNullBlock(1); + } + return driverContext.blockFactory().newConstantDoubleBlockWith(evaluateFinal(), 1); + } + } + + static final class GroupingState 
implements GroupingAggregatorState {
+
+        private ObjectArray<WelfordAlgorithm> states;
+        private final BigArrays bigArrays;
+
+        GroupingState(BigArrays bigArrays) {
+            this.states = bigArrays.newObjectArray(1);
+            this.bigArrays = bigArrays;
+        }
+
+        WelfordAlgorithm getOrNull(int position) {
+            if (position < states.size()) {
+                return states.get(position);
+            } else {
+                return null;
+            }
+        }
+
+        public void combine(int groupId, WelfordAlgorithm state) {
+            if (state == null) {
+                return;
+            }
+            combine(groupId, state.mean(), state.m2(), state.count());
+        }
+
+        public void combine(int groupId, double meanValue, double m2Value, long countValue) {
+            ensureCapacity(groupId);
+            var state = states.get(groupId);
+            if (state == null) {
+                state = new WelfordAlgorithm(meanValue, m2Value, countValue);
+                states.set(groupId, state);
+            } else {
+                state.add(meanValue, m2Value, countValue);
+            }
+        }
+
+        public WelfordAlgorithm getOrSet(int groupId) {
+            ensureCapacity(groupId);
+            var state = states.get(groupId);
+            if (state == null) {
+                state = new WelfordAlgorithm();
+                states.set(groupId, state);
+            }
+            return state;
+        }
+
+        public void add(int groupId, long value) {
+            var state = getOrSet(groupId);
+            state.add(value);
+        }
+
+        public void add(int groupId, double value) {
+            var state = getOrSet(groupId);
+            state.add(value);
+        }
+
+        public void add(int groupId, int value) {
+            var state = getOrSet(groupId);
+            state.add(value);
+        }
+
+        private void ensureCapacity(int groupId) {
+            states = bigArrays.grow(states, groupId + 1);
+        }
+
+        @Override
+        public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
+            assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset;
+            try (
+                var meanBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount());
+                var m2Builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount());
+                var countBuilder = driverContext.blockFactory().newLongBlockBuilder(selected.getPositionCount());
+            ) {
+                for (int i = 0; i < selected.getPositionCount(); i++) {
+                    final var groupId = selected.getInt(i);
+                    final var state = groupId < states.size() ? states.get(groupId) : null;
+                    if (state != null) {
+                        meanBuilder.appendDouble(state.mean());
+                        m2Builder.appendDouble(state.m2());
+                        countBuilder.appendLong(state.count());
+                    } else {
+                        meanBuilder.appendDouble(0.0);
+                        m2Builder.appendDouble(0.0);
+                        countBuilder.appendLong(0);
+                    }
+                }
+                blocks[offset + 0] = meanBuilder.build();
+                blocks[offset + 1] = m2Builder.build();
+                blocks[offset + 2] = countBuilder.build();
+            }
+        }
+
+        public Block evaluateFinal(IntVector selected, DriverContext driverContext) {
+            try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) {
+                for (int i = 0; i < selected.getPositionCount(); i++) {
+                    final var groupId = selected.getInt(i);
+                    final var st = getOrNull(groupId);
+                    if (st != null) {
+                        final var m2 = st.m2();
+                        final var count = st.count();
+                        if (count == 0 || Double.isFinite(m2) == false) {
+                            builder.appendNull();
+                        } else {
+                            builder.appendDouble(st.evaluate());
+                        }
+                    } else {
+                        builder.appendNull();
+                    }
+                }
+                return builder.build();
+            }
+        }
+
+        @Override
+        public void close() {
+            Releasables.close(states);
+        }
+
+        void enableGroupIdTracking(SeenGroupIds seenGroupIds) {
+            // noop - we handle the null states inside `toIntermediate` and `evaluateFinal`
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java
new file mode 100644
index 0000000000000..8ccb985507247
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/WelfordAlgorithm.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.aggregation;
+
+/**
+ * Algorithm for calculating standard deviation, one value at a time.
+ *
+ * @see <a href="https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Welford's_online_algorithm">
+ * Welford's_online_algorithm</a> and
+ * <a href="https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm">
+ * Parallel algorithm</a>
+ */
+public final class WelfordAlgorithm {
+    private double mean;
+    private double m2;
+    private long count;
+
+    public double mean() {
+        return mean;
+    }
+
+    public double m2() {
+        return m2;
+    }
+
+    public long count() {
+        return count;
+    }
+
+    public WelfordAlgorithm() {
+        this(0, 0, 0);
+    }
+
+    public WelfordAlgorithm(double mean, double m2, long count) {
+        this.mean = mean;
+        this.m2 = m2;
+        this.count = count;
+    }
+
+    public void add(int value) {
+        add((double) value);
+    }
+
+    public void add(long value) {
+        add((double) value);
+    }
+
+    public void add(double value) {
+        final double delta = value - mean;
+        count += 1;
+        mean += delta / count;
+        m2 += delta * (value - mean);
+    }
+
+    public void add(double meanValue, double m2Value, long countValue) {
+        if (countValue == 0) {
+            return;
+        }
+        if (count == 0) {
+            mean = meanValue;
+            m2 = m2Value;
+            count = countValue;
+            return;
+        }
+        double delta = mean - meanValue;
+        m2 += m2Value + delta * delta * count * countValue / (count + countValue);
+        mean = (mean * count + meanValue * countValue) / (count + countValue);
+        count += countValue;
+    }
+
+    public double evaluate() {
+        return count < 2 ?
0 : Math.sqrt(m2 / count); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st new file mode 100644 index 0000000000000..510d770f90d62 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-StdDevAggregator.java.st @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * A standard deviation aggregation definition for $type$. + * This class is generated. Edit `X-StdDevAggregator.java.st` instead. + */ +@Aggregator( + { + @IntermediateState(name = "mean", type = "DOUBLE"), + @IntermediateState(name = "m2", type = "DOUBLE"), + @IntermediateState(name = "count", type = "LONG") } +) +@GroupingAggregator +public class StdDev$Type$Aggregator { + + public static StdDevStates.SingleState initSingle() { + return new StdDevStates.SingleState(); + } + + public static void combine(StdDevStates.SingleState state, $type$ value) { + state.add(value); + } + + public static void combineIntermediate(StdDevStates.SingleState state, double mean, double m2, long count) { + state.combine(mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.SingleState state, DriverContext driverContext) { + return state.evaluateFinal(driverContext); + } + + public static StdDevStates.GroupingState initGrouping(BigArrays bigArrays) { + return new StdDevStates.GroupingState(bigArrays); + } + + public static void combine(StdDevStates.GroupingState current, int groupId, $type$ value) { + current.add(groupId, value); + } + + public static void combineStates(StdDevStates.GroupingState current, int groupId, StdDevStates.GroupingState state, int statePosition) { + current.combine(groupId, state.getOrNull(statePosition)); + } + + public static void combineIntermediate(StdDevStates.GroupingState state, int groupId, double mean, double m2, long count) { + state.combine(groupId, mean, m2, count); + } + + public static Block evaluateFinal(StdDevStates.GroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext); + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 66c5362a24134..5562028a5935f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2903,3 +2903,143 @@ max:integer | job_positions:keyword 39878 | Business Analyst 67492 | Data Scientist ; + +stdDeviation +required_capability: std_dev +// tag::stdev[] +FROM employees +| STATS STD_DEV(height) +// end::stdev[] +; + +// tag::stdev-result[] +STD_DEV(height):double +0.20637044362020449 +// end::stdev-result[] +; + 
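(Aside, not part of the diff: the expected values in these specs are population standard deviations. WelfordAlgorithm.evaluate() above returns Math.sqrt(m2 / count), dividing by count rather than count - 1, and yields 0 when count < 2. A minimal, hand-checkable Java sketch of the same arithmetic, using the [1, 2, 3] input from the stdDevRow spec further down:

    // Welford's online update, mirroring WelfordAlgorithm.add(double) above.
    class StdDevSketch {
        public static void main(String[] args) {
            double mean = 0, m2 = 0;
            long count = 0;
            for (double value : new double[] { 1, 2, 3 }) {
                double delta = value - mean;
                count += 1;
                mean += delta / count;        // running mean: 1.0, 1.5, 2.0
                m2 += delta * (value - mean); // running m2:   0.0, 0.5, 2.0
            }
            // Population std dev: sqrt(2.0 / 3) = 0.816496580927726, the stdDevRow expectation;
            // a single value leaves count < 2, so STD_DEV(b) evaluates to 0.0 for b = 5.
            System.out.println(count < 2 ? 0 : Math.sqrt(m2 / count));
        }
    }
)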
+stdDeviationNested +required_capability: std_dev +// tag::docsStatsStdDevNestedExpression[] +FROM employees +| STATS stddev_salary_change = STD_DEV(MV_MAX(salary_change)) +// end::docsStatsStdDevNestedExpression[] +; + +// tag::docsStatsStdDevNestedExpression-result[] +stddev_salary_change:double +6.875829592924112 +// end::docsStatsStdDevNestedExpression-result[] +; + + +stdDeviationWithLongs +required_capability: std_dev +FROM employees +| STATS STD_DEV(avg_worked_seconds) +; + +STD_DEV(avg_worked_seconds):double +5.76010425971634E7 +; + +stdDeviationWithInts +required_capability: std_dev +FROM employees +| STATS STD_DEV(salary) +; + +STD_DEV(salary):double +13765.12550278783 +; + +stdDeviationConstantValue +required_capability: std_dev +FROM employees +| WHERE languages == 2 +| STATS STD_DEV(languages) +; + +STD_DEV(languages):double +0.0 +; + +stdDeviationGroupedDoublesOnly +required_capability: std_dev +FROM employees +| STATS STD_DEV(height) BY languages +| SORT languages asc +; + +STD_DEV(height):double | languages:integer +0.22106409327010415 | 1 +0.22797190865484734 | 2 +0.18893070075713295 | 3 +0.14656141004227627 | 4 +0.17733860152780256 | 5 +0.2486543786061287 | null +; + +stdDeviationGroupedAllTypes +required_capability: std_dev +FROM employees +| WHERE languages < 3 +| STATS + double_std_dev = STD_DEV(height), + int_std_dev = STD_DEV(salary), + long_std_dev = STD_DEV(avg_worked_seconds) + BY languages +| SORT languages asc +; + +double_std_dev:double | int_std_dev:double | long_std_dev:double | languages:integer +0.22106409327010415 | 15166.244178730898 | 5.1998715922156096E7 | 1 +0.22797190865484734 | 12139.61099378116 | 5.309085506583288E7 | 2 +; + +stdDeviationNoRows +required_capability: std_dev +FROM employees +| WHERE languages IS null +| STATS STD_DEV(languages) +; + +STD_DEV(languages):double +null +; + +stdDevMultiValue +required_capability: std_dev +FROM employees +| STATS STD_DEV(salary_change) +; + +STD_DEV(salary_change):double +7.062226788733394 +; + +stdDevFilter +required_capability: std_dev +FROM employees +| STATS greater_than = STD_DEV(salary_change) WHERE languages > 3 +, less_than = STD_DEV(salary_change) WHERE languages <= 3 +, salary = STD_DEV(salary * 2) +, count = COUNT(*) BY gender +| SORT gender asc +; + +greater_than:double | less_than:double | salary:double | count:long | gender:keyword +6.4543266953142835 | 7.57786788789264 | 29045.770666969744 | 33 | F +6.975232333891946 | 6.604807075547775 | 26171.331109641273 | 57 | M +6.949207097931448 | 7.127229475750027 | 27921.220736207077 | 10 | null +; + +stdDevRow +required_capability: std_dev +ROW a = [1,2,3], b = 5 +| STATS STD_DEV(a), STD_DEV(b) +; + +STD_DEV(a):double | STD_DEV(b):double +0.816496580927726 | 0.0 +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 54b02c87b285b..a186b784e95fb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -447,6 +447,11 @@ public enum Cap { */ PER_AGG_FILTERING_ORDS, + /** + * Support for {@code STD_DEV} aggregation. 
+ */ + STD_DEV, + /** * Fix for https://github.com/elastic/elasticsearch/issues/114714 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index eafb1fdbcbdcb..ea1669ccc7a4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; +import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; @@ -276,6 +277,7 @@ private static FunctionDefinition[][] functions() { def(MedianAbsoluteDeviation.class, uni(MedianAbsoluteDeviation::new), "median_absolute_deviation"), def(Min.class, uni(Min::new), "min"), def(Percentile.class, bi(Percentile::new), "percentile"), + def(StdDev.class, uni(StdDev::new), "std_dev"), def(Sum.class, uni(Sum::new), "sum"), def(Top.class, tri(Top::new), "top"), def(Values.class, uni(Values::new), "values"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java index b9cfd8892dd69..d74b5c8b386b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -25,6 +25,7 @@ public static List getNamedWriteables() { Percentile.ENTRY, Rate.ENTRY, SpatialCentroid.ENTRY, + StdDev.ENTRY, Sum.ENTRY, Top.ENTRY, Values.ENTRY, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java new file mode 100644 index 0000000000000..189b6a81912cb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDev.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevIntAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.StdDevLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +public class StdDev extends AggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "StdDev", StdDev::new); + + @FunctionInfo( + returnType = "double", + description = "The standard deviation of a numeric field.", + isAggregation = true, + examples = { + @Example(file = "stats", tag = "stdev"), + @Example( + description = "The expression can use inline functions. 
For example, to calculate the standard "
+                + "deviation of each employee's maximum salary changes, first use `MV_MAX` on each row, "
+                + "and then use `STD_DEV` on the result",
+            file = "stats",
+            tag = "docsStatsStdDevNestedExpression"
+        ) }
+    )
+    public StdDev(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) {
+        this(source, field, Literal.TRUE);
+    }
+
+    public StdDev(Source source, Expression field, Expression filter) {
+        super(source, field, filter, emptyList());
+    }
+
+    private StdDev(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return ENTRY.name;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataType.DOUBLE;
+    }
+
+    @Override
+    protected Expression.TypeResolution resolveType() {
+        return isType(
+            field(),
+            dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG,
+            sourceText(),
+            DEFAULT,
+            "numeric except unsigned_long or counter types"
+        );
+    }
+
+    @Override
+    protected NodeInfo<StdDev> info() {
+        return NodeInfo.create(this, StdDev::new, field(), filter());
+    }
+
+    @Override
+    public StdDev replaceChildren(List<Expression> newChildren) {
+        return new StdDev(source(), newChildren.get(0), newChildren.get(1));
+    }
+
+    public StdDev withFilter(Expression filter) {
+        return new StdDev(source(), field(), filter);
+    }
+
+    @Override
+    public final AggregatorFunctionSupplier supplier(List<Integer> inputChannels) {
+        DataType type = field().dataType();
+        if (type == DataType.LONG) {
+            return new StdDevLongAggregatorFunctionSupplier(inputChannels);
+        }
+        if (type == DataType.INTEGER) {
+            return new StdDevIntAggregatorFunctionSupplier(inputChannels);
+        }
+        if (type == DataType.DOUBLE) {
+            return new StdDevDoubleAggregatorFunctionSupplier(inputChannels);
+        }
+        throw EsqlIllegalArgumentException.illegalDataType(type);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java
index 3e81c2a2c1101..605e0d7c3109c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java
@@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid;
+import org.elasticsearch.xpack.esql.expression.function.aggregate.StdDev;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Top;
@@ -48,9 +49,6 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT;
-import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT;
-
 /**
  * Static class used to convert aggregate expressions to the named expressions that represent their intermediate state.
 *

    @@ -78,6 +76,7 @@ final class AggregateMapper { Min.class, Percentile.class, SpatialCentroid.class, + StdDev.class, Sum.class, Values.class, Top.class, @@ -171,7 +170,7 @@ private static Stream, Tuple>> typeAndNames(Class types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); } else if (Top.class.isAssignableFrom(clazz)) { types = List.of("Boolean", "Int", "Long", "Double", "Ip", "BytesRef"); - } else if (Rate.class.isAssignableFrom(clazz)) { + } else if (Rate.class.isAssignableFrom(clazz) || StdDev.class.isAssignableFrom(clazz)) { types = List.of("Int", "Long", "Double"); } else if (FromPartial.class.isAssignableFrom(clazz) || ToPartial.class.isAssignableFrom(clazz)) { types = List.of(""); // no type diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java new file mode 100644 index 0000000000000..85b96e29d1f6a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/StdDevTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.aggregation.WelfordAlgorithm; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.MultiRowTestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class StdDevTests extends AbstractAggregationTestCase { + public StdDevTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = new ArrayList(); + + Stream.of( + MultiRowTestCaseSupplier.intCases(1, 1000, Integer.MIN_VALUE, Integer.MAX_VALUE, true), + MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), + MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true) + ).flatMap(List::stream).map(StdDevTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); + + return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new StdDev(source, args.get(0)); + } + + private static TestCaseSupplier makeSupplier(TestCaseSupplier.TypedDataSupplier fieldSupplier) { + return new TestCaseSupplier(List.of(fieldSupplier.type()), () -> { + var fieldTypedData = fieldSupplier.get(); + var fieldValues = fieldTypedData.multiRowData(); + + WelfordAlgorithm welfordAlgorithm = new WelfordAlgorithm(); + + for (var fieldValue : fieldValues) { 
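+                // Welford's online algorithm keeps a running count, mean and sum of squared
+                // deviations (M2), so the expected standard deviation is accumulated in a
+                // single pass without retaining the individual sample values.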
+ var value = ((Number) fieldValue).doubleValue(); + welfordAlgorithm.add(value); + } + var result = welfordAlgorithm.evaluate(); + var expected = Double.isInfinite(result) ? null : result; + return new TestCaseSupplier.TestCase( + List.of(fieldTypedData), + "StdDev[field=Attribute[channel=0]]", + DataType.DOUBLE, + equalTo(expected) + ); + }); + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 4c3b16c5dc309..72c7c51655378 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasbile, so we just check that we return the correct count as an approximation. - - length: {esql.functions: 120} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 121} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 117} # check the "sister" test above for a likely update to the same esql.functions length check + - length: {esql.functions: 118} # check the "sister" test above for a likely update to the same esql.functions length check From 25bd830020d5facaa4bd558dafd40cd1bbe795e7 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 22 Nov 2024 18:26:31 -0500 Subject: [PATCH 198/386] Remove some leftover references to the UBI docker image (#117391) --- .../InternalDistributionDownloadPlugin.java | 3 --- ...ockerUbiElasticsearchDistributionType.java | 27 ------------------- ...nternalElasticsearchDistributionTypes.java | 2 -- .../internal/test/DistroTestPlugin.java | 2 -- 4 files changed, 34 deletions(-) delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java index 60699522cdc3f..ec694de8ec597 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -173,9 +173,6 @@ private static String distributionProjectName(ElasticsearchDistribution distribu if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER) { return projectName + "docker" + archString + "-export"; } - if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_UBI) { - return projectName + "ubi-docker" + archString + "-export"; - } if (distribution.getType() == InternalElasticsearchDistributionTypes.DOCKER_IRONBANK) { return projectName + "ironbank-docker" + archString + "-export"; } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java deleted file mode 100644 index aa19bf6d60c53..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/DockerUbiElasticsearchDistributionType.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.gradle.internal.distribution; - -import org.elasticsearch.gradle.ElasticsearchDistributionType; - -public class DockerUbiElasticsearchDistributionType implements ElasticsearchDistributionType { - - DockerUbiElasticsearchDistributionType() {} - - @Override - public String getName() { - return "dockerUbi"; - } - - @Override - public boolean isDocker() { - return true; - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java index 8f0951da86b88..28776f03d17e8 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/distribution/InternalElasticsearchDistributionTypes.java @@ -17,7 +17,6 @@ public class InternalElasticsearchDistributionTypes { public static ElasticsearchDistributionType DEB = new DebElasticsearchDistributionType(); public static ElasticsearchDistributionType RPM = new RpmElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER = new DockerElasticsearchDistributionType(); - public static ElasticsearchDistributionType DOCKER_UBI = new DockerUbiElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_IRONBANK = new DockerIronBankElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_CLOUD_ESS = new DockerCloudEssElasticsearchDistributionType(); public static ElasticsearchDistributionType DOCKER_WOLFI = new DockerWolfiElasticsearchDistributionType(); @@ -26,7 +25,6 @@ public class InternalElasticsearchDistributionTypes { DEB, RPM, DOCKER, - DOCKER_UBI, DOCKER_IRONBANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index e8d2bbd93ff20..211718c151ba9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -51,7 +51,6 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_CLOUD_ESS; import static 
org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; -import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_WOLFI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; import static org.elasticsearch.gradle.internal.util.ParamsUtils.loadBuildParams; @@ -148,7 +147,6 @@ public void apply(Project project) { private static Map> lifecycleTasks(Project project, String taskPrefix) { Map> lifecyleTasks = new HashMap<>(); lifecyleTasks.put(DOCKER, project.getTasks().register(taskPrefix + ".docker")); - lifecyleTasks.put(DOCKER_UBI, project.getTasks().register(taskPrefix + ".docker-ubi")); lifecyleTasks.put(DOCKER_IRONBANK, project.getTasks().register(taskPrefix + ".docker-ironbank")); lifecyleTasks.put(DOCKER_CLOUD_ESS, project.getTasks().register(taskPrefix + ".docker-cloud-ess")); lifecyleTasks.put(DOCKER_WOLFI, project.getTasks().register(taskPrefix + ".docker-wolfi")); From 0d1979e0fee1f6a691f04fc4c7485644f86eb196 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 23 Nov 2024 10:30:33 +1100 Subject: [PATCH 199/386] Mute org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT testEveryActionIsEitherOperatorOnlyOrNonOperator #102992 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d086d41a15edd..edc13f3c47b78 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,6 +235,9 @@ tests: - class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT method: testCancelRequestWhenFailingFetchingPages issue: https://github.com/elastic/elasticsearch/issues/117397 +- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT + method: testEveryActionIsEitherOperatorOnlyOrNonOperator + issue: https://github.com/elastic/elasticsearch/issues/102992 # Examples: # From 2b8e4e727c8b08f3ceb13595db9ac296c312e0e1 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Fri, 22 Nov 2024 16:35:24 -0800 Subject: [PATCH 200/386] Migrate mapper-related modules to internal-*-rest-test (#117298) --- modules/mapper-extras/build.gradle | 10 ++-------- .../MapperExtrasClientYamlTestSuiteIT.java | 10 ++++++++++ modules/parent-join/build.gradle | 4 ++-- .../join/ParentChildClientYamlTestSuiteIT.java | 10 ++++++++++ modules/percolator/build.gradle | 4 ++-- .../PercolatorClientYamlTestSuiteIT.java | 10 ++++++++++ modules/reindex/build.gradle | 9 ++++++--- .../index/reindex/ReindexWithoutContentIT.java | 1 - .../reindex/ReindexClientYamlTestSuiteIT.java | 16 ++++++++++++++++ plugins/mapper-annotated-text/build.gradle | 10 ++-------- .../AnnotatedTextClientYamlTestSuiteIT.java | 10 ++++++++++ plugins/mapper-murmur3/build.gradle | 16 +++------------- .../MapperMurmur3ClientYamlTestSuiteIT.java | 10 ++++++++++ x-pack/plugin/mapper-unsigned-long/build.gradle | 14 ++------------ .../UnsignedLongClientYamlTestSuiteIT.java | 10 ++++++++++ x-pack/plugin/mapper-version/build.gradle | 9 ++------- .../VersionClientYamlTestSuiteIT.java | 10 ++++++++++ x-pack/plugin/wildcard/build.gradle | 8 +------- .../wildcard/WildcardClientYamlTestSuiteIT.java | 10 ++++++++++ 19 files changed, 118 insertions(+), 63 deletions(-) diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index 
eda55fe6de9da..992f39a22b28c 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -7,8 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -21,9 +21,3 @@ restResources { include '_common', 'cluster', 'field_caps', 'nodes', 'indices', 'index', 'search', 'get' } } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java b/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java index b325c81616257..80953af5a4cbb 100644 --- a/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java +++ b/modules/mapper-extras/src/yamlRestTest/java/org/elasticsearch/index/mapper/MapperExtrasClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class MapperExtrasClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -26,4 +28,12 @@ public MapperExtrasClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate t public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("mapper-extras").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index c714e52512b2e..0d34b5f6e3b40 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -6,8 +6,8 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java b/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java index 7b02f87691841..ecfc464f0739c 100644 --- a/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java +++ b/modules/parent-join/src/yamlRestTest/java/org/elasticsearch/join/ParentChildClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class ParentChildClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,12 @@ public ParentChildClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate te public static Iterable parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("parent-join").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index e7865a086a003..2d2f6767f5e62 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -6,8 +6,8 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java b/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java index 21bc2d8a4ae10..d71e758525085 100644 --- a/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java +++ b/modules/percolator/src/yamlRestTest/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,12 @@ public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate tes public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("percolator").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index b4a1c9cd6248d..7281c161e2c4a 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -15,9 +15,9 @@ import org.elasticsearch.gradle.transform.UnzipTransform apply plugin: 'elasticsearch.test-with-dependencies' apply plugin: 'elasticsearch.jdk-download' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -32,7 +32,6 @@ testClusters.configureEach { module ':modules:rest-root' // Whitelist reindexing from the local node so we can test reindex-from-remote. 
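  // The wildcard port ('127.0.0.1:*') is needed because test clusters bind ephemeral HTTP ports.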
setting 'reindex.remote.whitelist', '127.0.0.1:*' - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } dependencies { @@ -41,6 +40,10 @@ dependencies { // for parent/child testing testImplementation project(':modules:parent-join') testImplementation project(':modules:rest-root') + + clusterModules project(':modules:lang-painless') + clusterModules project(':modules:parent-join') + clusterModules project(":modules:rest-root") } restResources { diff --git a/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java b/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java index 99be7123040cc..a0212a937f27b 100644 --- a/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java +++ b/modules/reindex/src/javaRestTest/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java @@ -18,7 +18,6 @@ import static org.hamcrest.CoreMatchers.containsString; public class ReindexWithoutContentIT extends ESRestTestCase { - public void testReindexMissingBody() throws IOException { ResponseException responseException = expectThrows( ResponseException.class, diff --git a/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java b/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java index a44a2150bf07c..0378a63ed5481 100644 --- a/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java +++ b/modules/reindex/src/yamlRestTest/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -24,4 +26,18 @@ public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCa public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("reindex") + .module("lang-painless") + .module("parent-join") + .module("rest-root") + .setting("reindex.remote.whitelist", "127.0.0.1:*") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index ff7230701aa0a..435ad83974efa 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -6,20 +6,14 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.' classname 'org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextPlugin' } -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} - restResources { restApi { include '_common', 'indices', 'index', 'search' diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java b/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java index afb23106bc101..68d141b6df840 100644 --- a/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java +++ b/plugins/mapper-annotated-text/src/yamlRestTest/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class AnnotatedTextClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -25,4 +27,12 @@ public AnnotatedTextClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate public static Iterable parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().plugin("mapper-annotated-text").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index 15d7f6249695b..0271296df934d 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -6,8 +6,8 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { description 'The Mapper Murmur3 plugin allows to compute hashes of a field\'s values at index-time and to store them in the index.' 
@@ -18,12 +18,7 @@ esplugin { dependencies { compileOnly project(':modules:lang-painless:spi') testImplementation project(':modules:lang-painless') -} - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + clusterModules project(':modules:lang-painless') } restResources { @@ -31,8 +26,3 @@ restResources { include '_common', 'indices', 'index', 'search' } } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - setting 'xpack.security.enabled', 'false' -} diff --git a/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java b/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java index d4b0f2e0dc6a7..399b488a5d2f7 100644 --- a/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java +++ b/plugins/mapper-murmur3/src/yamlRestTest/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java @@ -12,8 +12,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; public class MapperMurmur3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -25,4 +27,12 @@ public MapperMurmur3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate public static Iterable parameters() throws Exception { return createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("lang-painless").plugin("mapper-murmur3").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index 7eff1bfe94a3a..17a4f8a03fa57 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -10,8 +10,8 @@ import org.elasticsearch.gradle.Version evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' esplugin { name 'unsigned-long' @@ -35,13 +35,3 @@ restResources { include '_common', 'bulk', 'indices', 'index', 'search', 'xpack' } } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} - -testClusters.configureEach { - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") -} diff --git a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java index 008bfb193387c..df2c5d81ca14a 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java +++ 
b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class UnsignedLongClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public UnsignedLongClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate t public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("unsigned-long").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-version/build.gradle b/x-pack/plugin/mapper-version/build.gradle index a87def29620c7..bf78c61523e39 100644 --- a/x-pack/plugin/mapper-version/build.gradle +++ b/x-pack/plugin/mapper-version/build.gradle @@ -8,8 +8,8 @@ evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' esplugin { @@ -29,8 +29,3 @@ dependencies { testImplementation project(path: xpackModule('analytics')) } -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java index bc9f32766a3bb..3474d5ce9be8c 100644 --- a/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-version/src/yamlRestTest/java/org/elasticsearch/xpack/versionfield/VersionClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class VersionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public VersionClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCa public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("mapper-version").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle index 760ad407575d7..1a4f133402582 100644 --- 
a/x-pack/plugin/wildcard/build.gradle +++ b/x-pack/plugin/wildcard/build.gradle @@ -6,7 +6,7 @@ */ apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { name 'wildcard' @@ -24,9 +24,3 @@ dependencies { compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) } - -if (buildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } -} diff --git a/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java b/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java index 61eb0c8b0de3e..c9ec7d71b1805 100644 --- a/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java +++ b/x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class WildcardClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public WildcardClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testC public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("wildcard").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } From 8fe13f50d20da05b653fd2831ca621548c1760c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Sat, 23 Nov 2024 13:00:51 +0100 Subject: [PATCH 201/386] [Profiling] Add k8s namespace to events (#117323) Co-authored-by: Francesco Gualazzi --- .../profiling/component-template/profiling-events.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index 8f50ebd334f16..f90d2202db0d3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -9,6 +9,7 @@ "sort": { "field": [ "profiling.project.id", + "k8s.namespace.name", "orchestrator.resource.name", "host.name", "container.name", @@ -80,6 +81,9 @@ }, "container.id": { "type": "keyword" + }, + "k8s.namespace.name": { + "type": "keyword" } } } From dd20828e73b393c52371f6f351fb9f644cd637ce Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 23 Nov 2024 05:49:53 -0800 Subject: [PATCH 202/386] Store plugin loader separately from instance (#117393) Stable plugins do not have a plugin instance, so their loader cannot be retrieved by looking at the instance class (which is a placeholder). 
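As a sketch of the lookup this changes (names taken from the diff below; `lp` and the
local variables are illustrative only):

    // Old: for a stable plugin this resolves the placeholder's loader, not the plugin's own.
    ClassLoader before = lp.instance().getClass().getClassLoader();
    // New: the loader captured when the plugin was loaded, stored on the record.
    ClassLoader after = lp.classLoader();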
This commit adds back the class loader of each plugin to the LoadedPlugin record so that it can be closed by tests. closes #117220 --- .../elasticsearch/plugins/PluginsService.java | 4 ++-- .../plugins/PluginsServiceTests.java | 17 +++++++++++------ .../plugins/MockPluginsService.java | 2 +- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index cfdb7aaf0b509..6ef3cd17ba2e9 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -62,7 +62,7 @@ public StablePluginsRegistry getStablePluginRegistry() { * @param descriptor Metadata about the plugin, usually loaded from plugin properties * @param instance The constructed instance of the plugin's main class */ - record LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { + record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader classLoader) { LoadedPlugin { Objects.requireNonNull(descriptor); @@ -426,7 +426,7 @@ We need to pass a name though so that we can show that a plugin was loaded (via } plugin = loadPlugin(pluginClass, settings, configPath); } - loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin)); + loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin, pluginLayer.pluginClassLoader())); } finally { privilegedSetContextClassLoader(cl); } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index b84f1d2c7635c..015bc72747bf2 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -466,7 +466,8 @@ public void testExtensiblePlugin() { List.of( new PluginsService.LoadedPlugin( new PluginDescriptor("extensible", null, null, null, null, classname, null, List.of(), false, false, false, false), - extensiblePlugin + extensiblePlugin, + null ) ) ); @@ -480,7 +481,8 @@ public void testExtensiblePlugin() { List.of( new PluginsService.LoadedPlugin( new PluginDescriptor("extensible", null, null, null, null, classname, null, List.of(), false, false, false, false), - extensiblePlugin + extensiblePlugin, + null ), new PluginsService.LoadedPlugin( new PluginDescriptor( @@ -497,7 +499,8 @@ public void testExtensiblePlugin() { false, false ), - testPlugin + testPlugin, + null ) ) ); @@ -885,20 +888,22 @@ static final class Loader extends ClassLoader { // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. 
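    // Stable plugins have no instance class of their own (the instance is a placeholder),
    // so the loader must be read from LoadedPlugin#classLoader rather than the instance.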
static void closePluginLoaders(PluginsService pluginService) { for (var lp : pluginService.plugins()) { - if (lp.instance().getClass().getClassLoader() instanceof URLClassLoader urlClassLoader) { + if (lp.classLoader() instanceof URLClassLoader urlClassLoader) { try { PrivilegedOperations.closeURLClassLoader(urlClassLoader); } catch (IOException unexpected) { throw new UncheckedIOException(unexpected); } - } - if (lp.instance().getClass().getClassLoader() instanceof UberModuleClassLoader loader) { + } else if (lp.classLoader() instanceof UberModuleClassLoader loader) { try { PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); } catch (Exception e) { throw new RuntimeException(e); } + } else { + throw new AssertionError("Cannot close unexpected classloader " + lp.classLoader()); } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index d51b2cfb450bc..9e96396493bdf 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -72,7 +72,7 @@ protected void addServerExportsService(Map Date: Sun, 24 Nov 2024 10:07:00 +0100 Subject: [PATCH 203/386] [Build] Do not create empty buildkite build artifacts (#117400) when building nesting bwc distros we tend to generate empty buildkite artifacts. Lets not create empty archives --- .../gradle/internal/ElasticsearchBuildCompletePlugin.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index 7d9537feaea56..14baa55794c95 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -147,12 +147,17 @@ interface Parameters extends FlowParameters { @SuppressWarnings("checkstyle:DescendantToken") @Override public void execute(BuildFinishedFlowAction.Parameters parameters) throws FileNotFoundException { + List filesToArchive = parameters.getFilteredFiles().get(); + if (filesToArchive.isEmpty()) { + return; + } File uploadFile = parameters.getUploadFile().get(); if (uploadFile.exists()) { getFileSystemOperations().delete(spec -> spec.delete(uploadFile)); } uploadFile.getParentFile().mkdirs(); - createBuildArchiveTar(parameters.getFilteredFiles().get(), parameters.getProjectDir().get(), uploadFile); + + createBuildArchiveTar(filesToArchive, parameters.getProjectDir().get(), uploadFile); if (uploadFile.exists() && "true".equals(System.getenv("BUILDKITE"))) { String uploadFilePath = uploadFile.getName(); File uploadFileDir = uploadFile.getParentFile(); From d2b3dc51d15e56c3a7a2a0ef28a1bbf31a97ddd7 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Sun, 24 Nov 2024 15:13:15 +0100 Subject: [PATCH 204/386] [Build] Reapply updating to Gradle 8.11.1 (#117394) This reverts commit b5c6d927c19993851eda525ac991fa17803e8843. 
* Simplify and fix dynamic project dependency handling --- .../gradle/wrapper/gradle-wrapper.properties | 4 +- .../internal/ElasticsearchJavadocPlugin.java | 7 ++- .../test/TestWithDependenciesPlugin.java | 31 ++++++++----- .../test/rest/RestTestBasePlugin.java | 10 +++-- .../AbstractCustomJavaToolchainResolver.java | 1 + .../src/main/resources/minimumGradleVersion | 2 +- .../gradle/LazyFileOutputStream.java | 6 +++ .../gradle/plugin/BasePluginBuildPlugin.java | 22 ++++++++- distribution/packages/build.gradle | 16 +++---- gradle/verification-metadata.xml | 45 +++++++++++++++++++ gradle/wrapper/gradle-wrapper.properties | 4 +- .../gradle/wrapper/gradle-wrapper.properties | 4 +- 12 files changed, 120 insertions(+), 32 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 6acc1431eaec1..22286c90de3d1 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip +distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java index 42a44edd7f9a5..aca310cbf1e47 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavadocPlugin.java @@ -82,12 +82,15 @@ private void configureJavadocForConfiguration(Project project, boolean shadow, C .sorted(Comparator.comparing(Dependency::getGroup)) .filter(d -> d instanceof ProjectDependency) .map(d -> (ProjectDependency) d) - .filter(p -> p.getDependencyProject() != null) .forEach(projectDependency -> configureDependency(project, shadow, projectDependency)); } private void configureDependency(Project project, boolean shadowed, ProjectDependency dep) { - var upstreamProject = dep.getDependencyProject(); + // we should use variant aware dependency management to resolve artifacts required for javadoc here + Project upstreamProject = project.project(dep.getPath()); + if (upstreamProject == null) { + return; + } if (shadowed) { /* * Include the source of shadowed upstream projects so we don't diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java index 487fe012a5941..a2851bfa2ae55 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithDependenciesPlugin.java @@ -10,11 +10,14 @@ package org.elasticsearch.gradle.internal.test; import org.apache.commons.lang.StringUtils; -import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.gradle.api.Plugin; import 
org.gradle.api.Project; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.ProjectDependency; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSetContainer; @@ -45,23 +48,31 @@ public void apply(final Project project) { Configuration testImplementationConfig = project.getConfigurations().getByName("testImplementation"); testImplementationConfig.getDependencies().all(dep -> { - if (dep instanceof ProjectDependency - && ((ProjectDependency) dep).getDependencyProject().getPlugins().hasPlugin(PluginBuildPlugin.class)) { - project.getGradle() - .projectsEvaluated(gradle -> addPluginResources(project, ((ProjectDependency) dep).getDependencyProject())); + if (dep instanceof ProjectDependency && dep.getGroup().contains("plugin")) { + addPluginResources(project, ((ProjectDependency) dep)); } }); } - private static void addPluginResources(final Project project, final Project pluginProject) { - final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + pluginProject.getName()); - String camelProjectName = stream(pluginProject.getName().split("-")).map(t -> StringUtils.capitalize(t)) + private static void addPluginResources(final Project project, final ProjectDependency projectDependency) { + final File outputDir = new File(project.getBuildDir(), "/generated-test-resources/" + projectDependency.getName()); + String camelProjectName = stream(projectDependency.getName().split("-")).map(t -> StringUtils.capitalize(t)) .collect(Collectors.joining()); String taskName = "copy" + camelProjectName + "Metadata"; + String metadataConfiguration = "resolved" + camelProjectName + "Metadata"; + Configuration pluginMetadata = project.getConfigurations().maybeCreate(metadataConfiguration); + pluginMetadata.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + pluginMetadata.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + DependencyHandler dependencyHandler = project.getDependencies(); + Dependency pluginMetadataDependency = dependencyHandler.project(Map.of("path", projectDependency.getPath())); + dependencyHandler.add(metadataConfiguration, pluginMetadataDependency); project.getTasks().register(taskName, Copy.class, copy -> { copy.into(outputDir); - copy.from(pluginProject.getTasks().named("pluginProperties")); - copy.from(pluginProject.file("src/main/plugin-metadata")); + copy.from(pluginMetadata); }); Map map = Map.of("builtBy", taskName); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 548791b9496c2..b44cfdac69ba7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -43,6 +43,7 @@ import org.gradle.api.file.ConfigurableFileCollection; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; +import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal; import 
org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.ClasspathNormalizer; import org.gradle.api.tasks.PathSensitivity; @@ -251,7 +252,7 @@ private void copyDependencies(Project project, DependencySet dependencies, Confi configuration.getDependencies() .stream() .filter(d -> d instanceof ProjectDependency) - .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependency) d).getDependencyProject().getPath()))) + .map(d -> project.getDependencies().project(Map.of("path", ((ProjectDependencyInternal) d).getPath()))) .forEach(dependencies::add); } @@ -328,8 +329,9 @@ private Configuration createPluginConfiguration(Project project, String name, bo Collection additionalDependencies = new LinkedHashSet<>(); for (Iterator iterator = dependencies.iterator(); iterator.hasNext();) { Dependency dependency = iterator.next(); + // this logic of relying on other projects metadata should probably live in a build service if (dependency instanceof ProjectDependency projectDependency) { - Project dependencyProject = projectDependency.getDependencyProject(); + Project dependencyProject = project.project(projectDependency.getPath()); List extendedPlugins = dependencyProject.getExtensions() .getByType(PluginPropertiesExtension.class) .getExtendedPlugins(); @@ -339,8 +341,8 @@ private Configuration createPluginConfiguration(Project project, String name, bo iterator.remove(); additionalDependencies.add( useExploded - ? getExplodedBundleDependency(project, dependencyProject.getPath()) - : getBundleZipTaskDependency(project, dependencyProject.getPath()) + ? getExplodedBundleDependency(project, projectDependency.getPath()) + : getBundleZipTaskDependency(project, projectDependency.getPath()) ); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java index ac458a632e818..0c6a6bc26156b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java @@ -34,6 +34,7 @@ static String toArchString(Architecture architecture) { case X86_64 -> "x64"; case AARCH64 -> "aarch64"; case X86 -> "x86"; + default -> throw new UnsupportedOperationException("Architecture " + architecture); }; } diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index dd78a707858a7..876e3136ea819 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.10.2 \ No newline at end of file +8.11.1 \ No newline at end of file diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java index 2f5b110fc59a9..c3da389fc30d4 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -39,6 +39,12 @@ public void write(byte b[], int off, int len) throws IOException { bootstrap(); delegate.write(b, off, len); } + + @Override + public void write(byte b[]) throws IOException { + bootstrap(); + delegate.write(b); + } }; } diff --git 
a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java index b3a792b418384..42e576012c0c9 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/BasePluginBuildPlugin.java @@ -24,6 +24,8 @@ import org.gradle.api.Task; import org.gradle.api.Transformer; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.attributes.LibraryElements; import org.gradle.api.file.CopySpec; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; @@ -126,9 +128,27 @@ private TaskProvider createBundleTasks(final Project project, PluginPropert // know about the plugin (used by test security code to statically initialize the plugin in unit tests) var testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test"); Map map = Map.of("builtBy", buildProperties); - testSourceSet.getOutput().dir(map, new File(project.getBuildDir(), "generated-resources")); + + File generatedResources = new File(project.getBuildDir(), "generated-resources"); + testSourceSet.getOutput().dir(map, generatedResources); testSourceSet.getResources().srcDir(pluginMetadata); + // expose the plugin properties and metadata for other plugins to use in their tests. + // See TestWithDependenciesPlugin for how this is used. + project.getConfigurations().create("pluginMetadata", conf -> { + conf.getAttributes().attribute(Attribute.of("pluginMetadata", Boolean.class), true); + conf.getAttributes() + .attribute( + LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, + project.getObjects().named(LibraryElements.class, LibraryElements.RESOURCES) + ); + }); + + project.getArtifacts().add("pluginMetadata", new File(project.getBuildDir(), "generated-descriptor"), artifact -> { + artifact.builtBy(buildProperties); + }); + project.getArtifacts().add("pluginMetadata", pluginMetadata); + // getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "plugin-metadata"); var bundleSpec = createBundleSpec(project, pluginMetadata, buildProperties); extension.setBundleSpec(bundleSpec); // create the actual bundle task, which zips up all the files for the plugin diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 486c95d15c7a1..5f45b4b72974f 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -43,7 +43,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.9.1" + id "com.netflix.nebula.ospackage-base" version "11.10.0" } ['deb', 'rpm'].each { type -> @@ -195,7 +195,7 @@ def commonPackageConfig(String type, String architecture) { configurationFile '/etc/elasticsearch/users_roles' from("${packagingFiles}") { dirPermissions { - unix(02750) + unix(0750) } into('/etc') permissionGroup 'elasticsearch' @@ -208,7 +208,7 @@ def commonPackageConfig(String type, String architecture) { from("${packagingFiles}/etc/elasticsearch") { into('/etc/elasticsearch') dirPermissions { - unix(02750) + unix(0750) } setgid = true filePermissions { @@ -260,7 +260,7 @@ def commonPackageConfig(String type, String architecture) { // ========= empty dirs ========= // NOTE: these are created under packagingFiles as empty, but the permissions are set here - Closure copyEmptyDir = { path, u, g, mode -> + Closure 
copyEmptyDir = { path, u, g, gid, mode ->
    File file = new File(path)
    into(file.parent) {
      from "${packagingFiles}/${file.parent}"
@@ -272,12 +272,12 @@ def commonPackageConfig(String type, String architecture) {
      dirPermissions {
        unix(mode)
      }
-      setgid (mode == 02750)
+      setgid(gid)
    }
  }
-  copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750)
-  copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', 02750)
-  copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755)
+  copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750)
+  copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', true, 0750)
+  copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', false, 0755)

   // the oss package conflicts with the default distribution and vice versa
   conflicts('elasticsearch-oss')
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 4cfd329ba728e..37178fd9439d0 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -944,6 +944,11 @@
+
+
+
+
+
@@ -3050,6 +3055,11 @@
+
+
+
+
+
@@ -3065,6 +3075,11 @@
+
+
+
+
+
@@ -3135,6 +3150,11 @@
+
+
+
+
+
@@ -3443,6 +3463,11 @@
+
+
+
+
+
@@ -3458,6 +3483,16 @@
+
+
+
+
+
+
+
+
+
+
@@ -3873,6 +3908,11 @@
+
+
+
+
+
@@ -4628,6 +4668,11 @@
+
+
+
+
+
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 6acc1431eaec1..22286c90de3d1 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,7 +1,7 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
+distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties
index 6acc1431eaec1..22286c90de3d1 100644
--- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties
+++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties
@@ -1,7 +1,7 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionSha256Sum=2ab88d6de2c23e6adae7363ae6e29cbdd2a709e992929b48b6530fd0c7133bd6
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
+distributionSha256Sum=89d4e70e4e84e2d2dfbb63e4daa53e21b25017cc70c37e4eea31ee51fb15098a
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-all.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
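The LazyFileOutputStream change earlier in this patch adds a write(byte[]) override so whole-array writes also bootstrap the delegate before forwarding, instead of detouring through the three-argument variant. A minimal sketch of that lazy-bootstrap pattern, with simplified names (openDelegate() is a placeholder here, not the actual API):

    import java.io.IOException;
    import java.io.OutputStream;

    // Sketch only: every write path creates the real stream on first use, then forwards to it.
    abstract class LazyStream extends OutputStream {
        private OutputStream delegate;

        protected abstract OutputStream openDelegate() throws IOException;

        private void bootstrap() throws IOException {
            if (delegate == null) {
                delegate = openDelegate();
            }
        }

        @Override
        public void write(int b) throws IOException {
            bootstrap();
            delegate.write(b);
        }

        @Override
        public void write(byte[] b) throws IOException {
            bootstrap();       // same shape as the override added in the patch above
            delegate.write(b); // forward the whole array directly to the delegate
        }
    }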
From 5f3b3801347c4df66d319f9d45ef9beb3c5d1383 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Sun, 24 Nov 2024 20:37:45 +0100
Subject: [PATCH 205/386] Unmute 115728 (#117431)

This is long fixed by #116264

Fixes #115728
---
 muted-tests.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index edc13f3c47b78..f33ca972b7d36 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -118,9 +118,6 @@ tests:
 - class: org.elasticsearch.search.SearchServiceTests
   method: testParseSourceValidation
   issue: https://github.com/elastic/elasticsearch/issues/115936
-- class: org.elasticsearch.search.query.SearchQueryIT
-  method: testAllDocsQueryString
-  issue: https://github.com/elastic/elasticsearch/issues/115728
 - class: org.elasticsearch.xpack.application.connector.ConnectorIndexServiceTests
   issue: https://github.com/elastic/elasticsearch/issues/116087
 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT

From e701697eb509029cc23184e430e30378d19ad714 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Sun, 24 Nov 2024 19:39:09 +0000
Subject: [PATCH 206/386] Remove historical features infrastructure (#117043)

v9 can only talk to 8.18, and historical features are a maximum of 8.12,
so we can remove all historical features and infrastructure.
---
 .../internal/BuildPluginFuncTest.groovy       |   2 +-
 .../internal/PublishPluginFuncTest.groovy     |   2 +-
 .../BaseInternalPluginBuildPlugin.java        |   4 +-
 .../gradle/internal/BuildPlugin.java          |   4 +-
 ...ava => ClusterFeaturesMetadataPlugin.java} |  12 +-
 ....java => ClusterFeaturesMetadataTask.java} |  12 +-
 .../test/rest/RestTestBasePlugin.java         |  11 +-
 distribution/build.gradle                     |   4 +-
 .../cluster/ClusterFeatures.java              |   2 +-
 .../elasticsearch/features/FeatureData.java   |  69 +---------
 .../features/FeatureService.java              |  16 +--
 .../features/FeatureSpecification.java        |  11 --
 .../features/FeatureServiceTests.java         | 117 ++----------------
 .../test/rest/ESRestTestCase.java             |  21 +---
 .../test/rest/ESRestTestFeatureService.java   |  54 ++------
 ... => ClusterFeaturesMetadataExtractor.java} |  30 ++---
 ...lusterFeaturesMetadataExtractorTests.java} |  19 +--
 .../xpack/esql/qa/rest/EsqlSpecTestCase.java  |   6 +-
 .../xpack/esql/action/EsqlCapabilities.java   |   3 -
 19 files changed, 62 insertions(+), 337 deletions(-)
 rename build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/{HistoricalFeaturesMetadataPlugin.java => ClusterFeaturesMetadataPlugin.java} (83%)
 rename build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/{HistoricalFeaturesMetadataTask.java => ClusterFeaturesMetadataTask.java} (81%)
 rename test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/{HistoricalFeaturesMetadataExtractor.java => ClusterFeaturesMetadataExtractor.java} (69%)
 rename test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/{HistoricalFeaturesMetadataExtractorTests.java => ClusterFeaturesMetadataExtractorTests.java} (67%)

diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
index 03b044583add0..63bb732d8a11d 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/BuildPluginFuncTest.groovy
@@ -119,7 +119,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
             noticeFile.set(file("NOTICE"))
         """
         when:
-        def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
+        def result = gradleRunner("assemble", "-x", "generateClusterFeaturesMetadata").build()
         then:
         result.task(":assemble").outcome == TaskOutcome.SUCCESS
         file("build/distributions/hello-world.jar").exists()
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy
index c7e11ba96c7dd..a199ff9d3eac5 100644
---
a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/PublishPluginFuncTest.groovy @@ -303,7 +303,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest { """ when: - def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build() + def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build() then: result.task(":generatePom").outcome == TaskOutcome.SUCCESS diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java index 49887dac5b6fd..2b79bc2b9173e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java @@ -14,7 +14,7 @@ import org.elasticsearch.gradle.internal.conventions.util.Util; import org.elasticsearch.gradle.internal.info.BuildParameterExtension; import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin; -import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; +import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.elasticsearch.gradle.plugin.PluginPropertiesExtension; import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; @@ -38,7 +38,7 @@ public void apply(Project project) { project.getPluginManager().apply(PluginBuildPlugin.class); project.getPluginManager().apply(JarHellPrecommitPlugin.class); project.getPluginManager().apply(ElasticsearchJavaPlugin.class); - project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class); + project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class); boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi(); // Clear default dependencies added by public PluginBuildPlugin as we add our // own project dependencies for internal builds diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java index 75984e1bc6998..fb8a9858e24d5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java @@ -12,7 +12,7 @@ import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks; import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin; -import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; +import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin; import org.gradle.api.InvalidUserDataException; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -63,7 +63,7 @@ public void apply(final Project project) { project.getPluginManager().apply(ElasticsearchJavadocPlugin.class); project.getPluginManager().apply(DependenciesInfoPlugin.class); project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class); - project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class); + 
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class); InternalPrecommitTasks.create(project, true); configureLicenseAndNotice(project); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java similarity index 83% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java index be972f11d4586..0c8a99fa82398 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataPlugin.java @@ -21,10 +21,10 @@ import java.util.Map; /** - * Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing. + * Extracts cluster feature metadata into a machine-readable format for use in backward compatibility testing. */ -public class HistoricalFeaturesMetadataPlugin implements Plugin { - public static final String HISTORICAL_FEATURES_JSON = "historical-features.json"; +public class ClusterFeaturesMetadataPlugin implements Plugin { + public static final String CLUSTER_FEATURES_JSON = "cluster-features.json"; public static final String FEATURES_METADATA_TYPE = "features-metadata-json"; public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata"; @@ -40,13 +40,13 @@ public void apply(Project project) { SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); - TaskProvider generateTask = project.getTasks() - .register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> { + TaskProvider generateTask = project.getTasks() + .register("generateClusterFeaturesMetadata", ClusterFeaturesMetadataTask.class, task -> { task.setClasspath( featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath()) .plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME)) ); - task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON)); + task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(CLUSTER_FEATURES_JSON)); }); Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java similarity index 81% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java index a2ea7af210dfd..aa4f90e4d2367 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/HistoricalFeaturesMetadataTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ClusterFeaturesMetadataTask.java @@ -26,7 +26,7 @@ import javax.inject.Inject; @CacheableTask -public 
abstract class HistoricalFeaturesMetadataTask extends DefaultTask { +public abstract class ClusterFeaturesMetadataTask extends DefaultTask { private FileCollection classpath; @OutputFile @@ -46,30 +46,30 @@ public void setClasspath(FileCollection classpath) { @TaskAction public void execute() { - getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> { + getWorkerExecutor().noIsolation().submit(ClusterFeaturesMetadataWorkAction.class, params -> { params.getClasspath().setFrom(getClasspath()); params.getOutputFile().set(getOutputFile()); }); } - public interface HistoricalFeaturesWorkParameters extends WorkParameters { + public interface ClusterFeaturesWorkParameters extends WorkParameters { ConfigurableFileCollection getClasspath(); RegularFileProperty getOutputFile(); } - public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction { + public abstract static class ClusterFeaturesMetadataWorkAction implements WorkAction { private final ExecOperations execOperations; @Inject - public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) { + public ClusterFeaturesMetadataWorkAction(ExecOperations execOperations) { this.execOperations = execOperations; } @Override public void execute() { LoggedExec.javaexec(execOperations, spec -> { - spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor"); + spec.getMainClass().set("org.elasticsearch.extractor.features.ClusterFeaturesMetadataExtractor"); spec.classpath(getParameters().getClasspath()); spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath()); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index b44cfdac69ba7..559c0f60abc08 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -20,8 +20,8 @@ import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; +import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin; import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener; -import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin; import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin; import org.elasticsearch.gradle.plugin.PluginBuildPlugin; import org.elasticsearch.gradle.plugin.PluginPropertiesExtension; @@ -116,12 +116,12 @@ public void apply(Project project) { extractedPluginsConfiguration.extendsFrom(pluginsConfiguration); configureArtifactTransforms(project); - // Create configuration for aggregating historical feature metadata + // Create configuration for aggregating feature metadata FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> { c.setCanBeConsumed(false); c.setCanBeResolved(true); c.attributes( - a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) + a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) ); c.defaultDependencies(d -> 
d.add(project.getDependencies().project(Map.of("path", ":server")))); c.withDependencies(dependencies -> { @@ -136,10 +136,7 @@ public void apply(Project project) { c.setCanBeConsumed(false); c.setCanBeResolved(true); c.attributes( - a -> a.attribute( - ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, - HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE - ) + a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) ); c.defaultDependencies( d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata"))) diff --git a/distribution/build.gradle b/distribution/build.gradle index e3481706ef230..bfbf10ac85e2f 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -14,7 +14,7 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.ConcatFilesTask import org.elasticsearch.gradle.internal.DependenciesInfoPlugin import org.elasticsearch.gradle.internal.NoticeTask -import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin +import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin import java.nio.file.Files import java.nio.file.Path @@ -33,7 +33,7 @@ configurations { } featuresMetadata { attributes { - attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) + attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE) } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java index 57b90454c7e8b..ad285cbd391cd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java @@ -95,7 +95,7 @@ public Set allNodeFeatures() { /** * {@code true} if {@code feature} is present on all nodes in the cluster. *

    - * NOTE: This should not be used directly, as it does not read historical features. + * NOTE: This should not be used directly. * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. */ @SuppressForbidden(reason = "directly reading cluster features") diff --git a/server/src/main/java/org/elasticsearch/features/FeatureData.java b/server/src/main/java/org/elasticsearch/features/FeatureData.java index 991bb4d82be3d..65b95eae27e06 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureData.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureData.java @@ -9,25 +9,19 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.NavigableMap; import java.util.Set; -import java.util.TreeMap; - -import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION; /** - * Reads and consolidate features exposed by a list {@link FeatureSpecification}, grouping them into historical features and node - * features for the consumption of {@link FeatureService} + * Reads and consolidate features exposed by a list {@link FeatureSpecification}, + * grouping them together for the consumption of {@link FeatureService} */ public class FeatureData { @@ -40,19 +34,14 @@ public class FeatureData { } } - private final NavigableMap> historicalFeatures; private final Map nodeFeatures; - private FeatureData(NavigableMap> historicalFeatures, Map nodeFeatures) { - this.historicalFeatures = historicalFeatures; + private FeatureData(Map nodeFeatures) { this.nodeFeatures = nodeFeatures; } public static FeatureData createFromSpecifications(List specs) { Map allFeatures = new HashMap<>(); - - // Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version - NavigableMap> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of())); Map nodeFeatures = new HashMap<>(); for (FeatureSpecification spec : specs) { Set specFeatures = spec.getFeatures(); @@ -61,39 +50,6 @@ public static FeatureData createFromSpecifications(List new HashSet<>()).add(hfe.getKey().id()); - } - for (NodeFeature f : specFeatures) { FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec); if (existing != null && existing.getClass() != spec.getClass()) { @@ -106,24 +62,7 @@ public static FeatureData createFromSpecifications(List> consolidateHistoricalFeatures( - NavigableMap> declaredHistoricalFeatures - ) { - // update each version by adding in all features from previous versions - Set featureAggregator = new HashSet<>(); - for (Map.Entry> versions : declaredHistoricalFeatures.entrySet()) { - featureAggregator.addAll(versions.getValue()); - versions.setValue(Set.copyOf(featureAggregator)); - } - - return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures); - } - - public NavigableMap> getHistoricalFeatures() { - return historicalFeatures; + return new FeatureData(Map.copyOf(nodeFeatures)); } public Map getNodeFeatures() { diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java index 1d911a75a4838..9a0ac7cafc183 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureService.java +++ 
b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -9,7 +9,6 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.logging.LogManager; @@ -17,8 +16,6 @@ import java.util.List; import java.util.Map; -import java.util.NavigableMap; -import java.util.Set; /** * Manages information on the features supported by nodes in the cluster. @@ -34,9 +31,6 @@ public class FeatureService { private static final Logger logger = LogManager.getLogger(FeatureService.class); - public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0; - - private final NavigableMap> historicalFeatures; private final Map nodeFeatures; /** @@ -47,13 +41,12 @@ public FeatureService(List specs) { var featureData = FeatureData.createFromSpecifications(specs); nodeFeatures = featureData.getNodeFeatures(); - historicalFeatures = featureData.getHistoricalFeatures(); logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList()); } /** - * The non-historical features supported by this node. + * The features supported by this node. * @return Map of {@code feature-id} to its declaring {@code NodeFeature} object. */ public Map getNodeFeatures() { @@ -65,11 +58,6 @@ public Map getNodeFeatures() { */ @SuppressForbidden(reason = "We need basic feature information from cluster state") public boolean clusterHasFeature(ClusterState state, NodeFeature feature) { - if (state.clusterFeatures().clusterHasFeature(feature)) { - return true; - } - - var features = historicalFeatures.floorEntry(state.getNodes().getMinNodeVersion()); - return features != null && features.getValue().contains(feature.id()); + return state.clusterFeatures().clusterHasFeature(feature); } } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java index 03f0dd89f172e..c37bc4488f109 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -9,9 +9,6 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; - -import java.util.Map; import java.util.Set; /** @@ -49,12 +46,4 @@ default Set getFeatures() { default Set getTestFeatures() { return Set.of(); } - - /** - * Returns information on historical features that should be deemed to be present on all nodes - * on or above the {@link Version} specified. 
- */ - default Map getHistoricalFeatures() { - return Map.of(); - } } diff --git a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java index e103704c89649..874a6a96313e4 100644 --- a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java +++ b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java @@ -9,15 +9,9 @@ package org.elasticsearch.features; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import java.util.List; import java.util.Map; @@ -30,79 +24,36 @@ public class FeatureServiceTests extends ESTestCase { private static class TestFeatureSpecification implements FeatureSpecification { private final Set features; - private final Map historicalFeatures; - private TestFeatureSpecification(Set features, Map historicalFeatures) { + private TestFeatureSpecification(Set features) { this.features = features; - this.historicalFeatures = historicalFeatures; } @Override public Set getFeatures() { return features; } - - @Override - public Map getHistoricalFeatures() { - return historicalFeatures; - } } public void testFailsDuplicateFeatures() { // these all need to be separate classes to trigger the exception - FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { - }; - FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { - }; - FeatureSpecification hfs1 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) { + FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) { }; - FeatureSpecification hfs2 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) { + FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1"))) { }; assertThat( expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, fs2))).getMessage(), containsString("Duplicate feature") ); - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(hfs1, hfs2))).getMessage(), - containsString("Duplicate feature") - ); - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, hfs1))).getMessage(), - containsString("Duplicate feature") - ); - } - - public void testFailsNonHistoricalVersion() { - FeatureSpecification fs = new TestFeatureSpecification( - Set.of(), - Map.of(new NodeFeature("f1"), Version.fromId(FeatureService.CLUSTER_FEATURES_ADDED_VERSION.id + 1)) - ); - - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(), - containsString("not a historical version") - ); - } - - public void testFailsSameRegularAndHistoricalFeature() { - FeatureSpecification fs = new TestFeatureSpecification( - Set.of(new NodeFeature("f1")), - Map.of(new NodeFeature("f1"), Version.V_8_12_0) - ); - - assertThat( - expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(), - containsString("cannot be declared as both a regular and historical 
feature") - ); } public void testGetNodeFeaturesCombinesAllSpecs() { List specs = List.of( - new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), - new TestFeatureSpecification(Set.of(), Map.of()) + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f3"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))), + new TestFeatureSpecification(Set.of()) ); FeatureService service = new FeatureService(specs); @@ -111,10 +62,10 @@ public void testGetNodeFeaturesCombinesAllSpecs() { public void testStateHasFeatures() { List specs = List.of( - new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), - new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), - new TestFeatureSpecification(Set.of(), Map.of()) + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f3"))), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5"))), + new TestFeatureSpecification(Set.of()) ); ClusterState state = ClusterState.builder(ClusterName.DEFAULT) @@ -130,50 +81,4 @@ public void testStateHasFeatures() { assertFalse(service.clusterHasFeature(state, new NodeFeature("nf2"))); assertFalse(service.clusterHasFeature(state, new NodeFeature("nf3"))); } - - private static ClusterState stateWithMinVersion(Version version) { - DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(); - nodes.add(DiscoveryNodeUtils.builder("node").version(version, IndexVersions.ZERO, IndexVersion.current()).build()); - for (int n = randomInt(5); n >= 0; n--) { - nodes.add( - DiscoveryNodeUtils.builder("node" + n) - .version( - VersionUtils.randomVersionBetween(random(), version, Version.CURRENT), - IndexVersions.ZERO, - IndexVersion.current() - ) - .build() - ); - } - - return ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build(); - } - - public void testStateHasHistoricalFeatures() { - NodeFeature v8_11_0 = new NodeFeature("hf_8.11.0"); - NodeFeature v8_10_0 = new NodeFeature("hf_8.10.0"); - NodeFeature v7_17_0 = new NodeFeature("hf_7.17.0"); - List specs = List.of( - new TestFeatureSpecification(Set.of(), Map.of(v8_11_0, Version.V_8_11_0)), - new TestFeatureSpecification(Set.of(), Map.of(v8_10_0, Version.V_8_10_0)), - new TestFeatureSpecification(Set.of(), Map.of(v7_17_0, Version.V_7_17_0)) - ); - - FeatureService service = new FeatureService(specs); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_11_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_11_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_11_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), 
v8_10_0)); - assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v7_17_0)); - - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_11_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_10_0)); - assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v7_17_0)); - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a4195a07e7621..8ca9c0709b359 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -63,7 +63,6 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.index.IndexSettings; @@ -398,29 +397,11 @@ public void initClient() throws IOException { assert nodesVersions != null; } - /** - * Override to provide additional test-only historical features. - * - * Note: This extension point cannot be used to add cluster features. The provided {@link FeatureSpecification}s - * must contain only historical features, otherwise an assertion error is thrown. - */ - protected List additionalTestOnlyHistoricalFeatures() { - return List.of(); - } - protected final TestFeatureService createTestFeatureService( Map> clusterStateFeatures, Set semanticNodeVersions ) { - // Historical features information is unavailable when using legacy test plugins - if (ESRestTestFeatureService.hasFeatureMetadata() == false) { - logger.warn( - "This test is running on the legacy test framework; historical features from production code will not be available. " - + "You need to port the test to the new test plugins in order to use historical features from production code. " - + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification." 
- ); - } - return new ESRestTestFeatureService(additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, clusterStateFeatures.values()); + return new ESRestTestFeatureService(semanticNodeVersions, clusterStateFeatures.values()); } protected static boolean has(ProductFeature feature) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index cd3406e7ddac5..9054dc6f94182 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -13,9 +13,6 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.features.FeatureData; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; @@ -25,13 +22,9 @@ import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; -import java.util.List; -import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Predicate; @@ -48,33 +41,12 @@ class ESRestTestFeatureService implements TestFeatureService { */ private static final Pattern VERSION_FEATURE_PATTERN = Pattern.compile("gte_v(\\d+\\.\\d+\\.\\d+)"); - private final Set knownHistoricalFeatureNames; private final Collection nodeVersions; private final Collection> nodeFeatures; - private final Collection> nodeHistoricalFeatures; - ESRestTestFeatureService(List featureSpecs, Set nodeVersions, Collection> nodeFeatures) { - List specs = new ArrayList<>(featureSpecs); - if (MetadataHolder.HISTORICAL_FEATURES != null) { - specs.add(MetadataHolder.HISTORICAL_FEATURES); - } - FeatureData featureData = FeatureData.createFromSpecifications(specs); - assert featureData.getNodeFeatures().isEmpty() - : Strings.format( - "Only historical features can be injected via ESRestTestCase#additionalTestOnlyHistoricalFeatures(), rejecting %s", - featureData.getNodeFeatures().keySet() - ); - this.knownHistoricalFeatureNames = featureData.getHistoricalFeatures().lastEntry().getValue(); + ESRestTestFeatureService(Set nodeVersions, Collection> nodeFeatures) { this.nodeVersions = nodeVersions; this.nodeFeatures = nodeFeatures; - this.nodeHistoricalFeatures = nodeVersions.stream() - .map(featureData.getHistoricalFeatures()::floorEntry) - .map(Map.Entry::getValue) - .toList(); - } - - public static boolean hasFeatureMetadata() { - return MetadataHolder.HISTORICAL_FEATURES != null; } private static boolean checkCollection(Collection coll, Predicate pred, boolean any) { @@ -83,11 +55,10 @@ private static boolean checkCollection(Collection coll, Predicate pred @Override public boolean clusterHasFeature(String featureId, boolean any) { - if (checkCollection(nodeFeatures, s -> s.contains(featureId), any) - || checkCollection(nodeHistoricalFeatures, s -> s.contains(featureId), any)) { + if (checkCollection(nodeFeatures, s -> s.contains(featureId), any)) { return true; } - if (MetadataHolder.FEATURE_NAMES.contains(featureId) || 
knownHistoricalFeatureNames.contains(featureId)) { + if (MetadataHolder.FEATURE_NAMES.contains(featureId)) { return false; // feature known but not present } @@ -131,24 +102,20 @@ public boolean clusterHasFeature(String featureId, boolean any) { return false; } + public static boolean hasFeatureMetadata() { + return MetadataHolder.FEATURE_NAMES.isEmpty() == false; + } + private static class MetadataHolder { - private static final FeatureSpecification HISTORICAL_FEATURES; private static final Set FEATURE_NAMES; static { String metadataPath = System.getProperty("tests.features.metadata.path"); if (metadataPath == null) { FEATURE_NAMES = emptySet(); - HISTORICAL_FEATURES = null; } else { Set featureNames = new HashSet<>(); - Map historicalFeatures = new HashMap<>(); loadFeatureMetadata(metadataPath, (key, value) -> { - if (key.equals("historical_features") && value instanceof Map map) { - for (var entry : map.entrySet()) { - historicalFeatures.put(new NodeFeature((String) entry.getKey()), Version.fromString((String) entry.getValue())); - } - } if (key.equals("feature_names") && value instanceof Collection collection) { for (var entry : collection) { featureNames.add((String) entry); @@ -156,13 +123,6 @@ private static class MetadataHolder { } }); FEATURE_NAMES = Collections.unmodifiableSet(featureNames); - Map unmodifiableHistoricalFeatures = Collections.unmodifiableMap(historicalFeatures); - HISTORICAL_FEATURES = new FeatureSpecification() { - @Override - public Map getHistoricalFeatures() { - return unmodifiableHistoricalFeatures; - } - }; } } diff --git a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java similarity index 69% rename from test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java rename to test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java index 3ffa27126fac8..3a090a1b3fadc 100644 --- a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java +++ b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractor.java @@ -9,9 +9,8 @@ package org.elasticsearch.extractor.features; -import org.elasticsearch.Version; -import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.XContentGenerator; @@ -25,14 +24,12 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.ServiceLoader; import java.util.Set; import java.util.stream.Stream; -public class HistoricalFeaturesMetadataExtractor { +public class ClusterFeaturesMetadataExtractor { private final ClassLoader classLoader; static { @@ -40,7 +37,7 @@ public class HistoricalFeaturesMetadataExtractor { LogConfigurator.configureESLogging(); } - public HistoricalFeaturesMetadataExtractor(ClassLoader classLoader) { + public ClusterFeaturesMetadataExtractor(ClassLoader classLoader) { this.classLoader = classLoader; } @@ -56,9 +53,7 @@ public static void main(String[] args) { 
printUsageAndExit(); } - new HistoricalFeaturesMetadataExtractor(HistoricalFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile( - outputFile - ); + new ClusterFeaturesMetadataExtractor(ClusterFeaturesMetadataExtractor.class.getClassLoader()).generateMetadataFile(outputFile); } public void generateMetadataFile(Path outputFile) { @@ -67,13 +62,7 @@ public void generateMetadataFile(Path outputFile) { XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os) ) { generator.writeStartObject(); - extractHistoricalFeatureMetadata((historical, names) -> { - generator.writeFieldName("historical_features"); - generator.writeStartObject(); - for (Map.Entry entry : historical.entrySet()) { - generator.writeStringField(entry.getKey().id(), entry.getValue().toString()); - } - generator.writeEndObject(); + extractClusterFeaturesMetadata(names -> { generator.writeFieldName("feature_names"); generator.writeStartArray(); for (var entry : names) { @@ -87,22 +76,19 @@ public void generateMetadataFile(Path outputFile) { } } - void extractHistoricalFeatureMetadata(CheckedBiConsumer, Set, IOException> metadataConsumer) - throws IOException { - Map historicalFeatures = new HashMap<>(); + void extractClusterFeaturesMetadata(CheckedConsumer, IOException> metadataConsumer) throws IOException { Set featureNames = new HashSet<>(); ServiceLoader featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader); for (FeatureSpecification featureSpecification : featureSpecLoader) { - historicalFeatures.putAll(featureSpecification.getHistoricalFeatures()); Stream.concat(featureSpecification.getFeatures().stream(), featureSpecification.getTestFeatures().stream()) .map(NodeFeature::id) .forEach(featureNames::add); } - metadataConsumer.accept(historicalFeatures, featureNames); + metadataConsumer.accept(featureNames); } private static void printUsageAndExit() { - System.err.println("Usage: HistoricalFeaturesMetadataExtractor "); + System.err.println("Usage: ClusterFeaturesMetadataExtractor "); System.exit(1); } } diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java similarity index 67% rename from test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java rename to test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java index d810f17ae552e..af69aaff86cc5 100644 --- a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/ClusterFeaturesMetadataExtractorTests.java @@ -9,8 +9,6 @@ package org.elasticsearch.extractor.features; -import org.elasticsearch.Version; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; @@ -21,7 +19,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -29,25 +26,19 @@ import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; import static org.hamcrest.Matchers.containsInAnyOrder; import static 
org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; -public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase { +public class ClusterFeaturesMetadataExtractorTests extends ESTestCase { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); public void testExtractHistoricalMetadata() throws IOException { - HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader()); - Map nodeFeatureVersionMap = new HashMap<>(); + ClusterFeaturesMetadataExtractor extractor = new ClusterFeaturesMetadataExtractor(this.getClass().getClassLoader()); Set featureNamesSet = new HashSet<>(); - extractor.extractHistoricalFeatureMetadata((historical, names) -> { - nodeFeatureVersionMap.putAll(historical); - featureNamesSet.addAll(names); - }); - // assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + extractor.extractClusterFeaturesMetadata(featureNamesSet::addAll); assertThat(featureNamesSet, not(empty())); assertThat(featureNamesSet, hasItem("test_features_enabled")); @@ -55,11 +46,7 @@ public void testExtractHistoricalMetadata() throws IOException { extractor.generateMetadataFile(outputFile); try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) { Map parsedMap = parser.map(); - assertThat(parsedMap, hasKey("historical_features")); assertThat(parsedMap, hasKey("feature_names")); - @SuppressWarnings("unchecked") - Map historicalFeaturesMap = (Map) (parsedMap.get("historical_features")); - nodeFeatureVersionMap.forEach((key, value) -> assertThat(historicalFeaturesMap, hasEntry(key.id(), value.toString()))); @SuppressWarnings("unchecked") Collection featureNamesList = (Collection) (parsedMap.get("feature_names")); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 6ebf05755ef5e..265d9f7bd8cd5 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -51,7 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; -import java.util.stream.Stream; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; @@ -207,10 +206,7 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te } } - var features = Stream.concat( - new EsqlFeatures().getFeatures().stream(), - new EsqlFeatures().getHistoricalFeatures().keySet().stream() - ).map(NodeFeature::id).collect(Collectors.toSet()); + var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); for (String feature : testCase.requiredCapabilities) { var esqlFeature = "esql." 
+ feature;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index a186b784e95fb..d675f772b5a3b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -567,9 +567,6 @@ public static Set capabilities(boolean all) {
         for (NodeFeature feature : new EsqlFeatures().getFeatures()) {
             caps.add(cap(feature));
         }
-        for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) {
-            caps.add(cap(feature));
-        }
         return Set.copyOf(caps);
     }
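With the historical-features machinery gone (patch 206 above), a plugin's features all flow through FeatureSpecification#getFeatures and are checked via FeatureService#clusterHasFeature. A minimal sketch of the resulting shape; the plugin class and feature id here are illustrative, not taken from the patch:

    import java.util.Set;

    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    // Hypothetical plugin-side declaration; only getFeatures() remains to override.
    public class MyPluginFeatures implements FeatureSpecification {
        public static final NodeFeature MY_FEATURE = new NodeFeature("my_plugin.my_feature");

        @Override
        public Set<NodeFeature> getFeatures() {
            return Set.of(MY_FEATURE); // no getHistoricalFeatures() counterpart any more
        }
    }

    // Hypothetical caller-side check, mirroring the simplified FeatureService above:
    //   boolean supported = featureService.clusterHasFeature(clusterState, MyPluginFeatures.MY_FEATURE);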
From d22a946fa736f4b08b9e0ec655afff4c5446c4d4 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Sun, 24 Nov 2024 17:39:20 -0800
Subject: [PATCH 207/386] Fix testCancelRequestWhenFailingFetchingPages
 (#117437)

Each data-node request involves two exchange sinks: an external one for
fetching pages from the coordinator and an internal one for node-level
reduction. Currently, the test selects one of these sinks randomly,
leading to assertion failures. This update ensures the test consistently
selects the external exchange sink.

Closes #117397
---
 muted-tests.yml                               |  3 ---
 .../xpack/esql/action/EsqlActionTaskIT.java   | 24 +++++++++++++------
 2 files changed, 17 insertions(+), 10 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index f33ca972b7d36..0d2e6b991a5c3 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -229,9 +229,6 @@ tests:
 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
   method: testInferDeploysDefaultElser
   issue: https://github.com/elastic/elasticsearch/issues/114913
-- class: org.elasticsearch.xpack.esql.action.EsqlActionTaskIT
-  method: testCancelRequestWhenFailingFetchingPages
-  issue: https://github.com/elastic/elasticsearch/issues/117397
 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
   method: testEveryActionIsEitherOperatorOnlyOrNonOperator
   issue: https://github.com/elastic/elasticsearch/issues/102992
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index 460ab0f5b8b38..56453a291ea81 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -392,12 +392,13 @@ protected void doRun() throws Exception {
             .get();
         ensureYellowAndNoInitializingShards("test");
         request.query("FROM test | LIMIT 10");
-        request.pragmas(randomPragmas());
+        QueryPragmas pragmas = randomPragmas();
+        request.pragmas(pragmas);
         PlainActionFuture future = new PlainActionFuture<>();
         client.execute(EsqlQueryAction.INSTANCE, request, future);
         ExchangeService exchangeService = internalCluster().getInstance(ExchangeService.class, dataNode);
-        boolean waitedForPages;
-        final String sessionId;
+        final boolean waitedForPages;
+        final String exchangeId;
         try {
             List foundTasks = new ArrayList<>();
             assertBusy(() -> {
@@ -411,13 +412,22 @@ protected void doRun() throws Exception {
                 assertThat(tasks, hasSize(1));
                 foundTasks.addAll(tasks);
             });
-            sessionId = foundTasks.get(0).taskId().toString();
+            final String sessionId = foundTasks.get(0).taskId().toString();
             assertTrue(fetchingStarted.await(1, TimeUnit.MINUTES));
-            String exchangeId = exchangeService.sinkKeys().stream().filter(s -> s.startsWith(sessionId)).findFirst().get();
+            List sinkKeys = exchangeService.sinkKeys()
+                .stream()
+                .filter(
+                    s -> s.startsWith(sessionId)
+                        // exclude the node-level reduction sink
+                        && s.endsWith("[n]") == false
+                )
+                .toList();
+            assertThat(sinkKeys.toString(), sinkKeys.size(), equalTo(1));
+            exchangeId = sinkKeys.get(0);
             ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(exchangeId);
             waitedForPages = randomBoolean();
             if (waitedForPages) {
-                // do not fail exchange requests until we have some pages
+                // do not fail exchange requests until we have some pages.
                 assertBusy(() -> assertThat(exchangeSink.bufferSize(), greaterThan(0)));
             }
         } finally {
@@ -429,7 +439,7 @@ protected void doRun() throws Exception {
             // As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is
             // longer than the assertBusy timeout.
             if (waitedForPages == false) {
-                exchangeService.finishSinkHandler(sessionId, failure);
+                exchangeService.finishSinkHandler(exchangeId, failure);
             }
         } finally {
             transportService.clearAllRules();

From 2f8bb0b23ce6070335fb750d9e76265f558ea3a9 Mon Sep 17 00:00:00 2001
From: Quentin Pradet
Date: Mon, 25 Nov 2024 11:43:36 +0400
Subject: [PATCH 208/386] Add missing async_search query parameters to
 rest-api-spec (#117312)

---
 docs/changelog/117312.yaml                       |  5 +++++
 .../rest-api-spec/api/async_search.submit.json   | 15 +++++++++++++++
 2 files changed, 20 insertions(+)
 create mode 100644 docs/changelog/117312.yaml

diff --git a/docs/changelog/117312.yaml b/docs/changelog/117312.yaml
new file mode 100644
index 0000000000000..302b91388ef2b
--- /dev/null
+++ b/docs/changelog/117312.yaml
@@ -0,0 +1,5 @@
+pr: 117312
+summary: Add missing `async_search` query parameters to rest-api-spec
+area: Search
+type: bug
+issues: []
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json
index 5cd2b0e26459e..a7a7ebe838eab 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json
@@ -65,6 +65,11 @@
         "type":"boolean",
         "description":"Specify whether wildcard and prefix queries should be analyzed (default: false)"
       },
+      "ccs_minimize_roundtrips":{
+        "type":"boolean",
+        "default":false,
+        "description":"When doing a cross-cluster search, setting it to true may improve overall search latency, particularly when searching clusters with a large number of shards. However, when set to true, the progress of searches on the remote clusters will not be received until the search finishes on all clusters."
+      },
       "default_operator":{
         "type":"enum",
         "options":[
@@ -126,6 +131,16 @@
         "type":"string",
         "description":"Specify the node or shard the operation should be performed on (default: random)"
       },
+      "pre_filter_shard_size":{
+        "type":"number",
+        "default": 1,
+        "description":"Cannot be changed: this is to enforce the execution of a pre-filter roundtrip to retrieve statistics from each shard so that the ones that surely don’t hold any document matching the query get skipped."
+      },
+      "rest_total_hits_as_int":{
+        "type":"boolean",
+        "description":"Indicates whether hits.total should be rendered as an integer or an object in the rest search response",
+        "default":false
+      },
       "q":{
         "type":"string",
         "description":"Query in the Lucene query string syntax"
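A brief usage sketch for the parameters patch 208 documents, written against Elasticsearch's low-level REST client; the endpoint path and parameter names come from the spec above, while restClient is assumed to be an already-initialized org.elasticsearch.client.RestClient:

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;

    // Submit an async search, opting into the newly documented parameters.
    Request submit = new Request("POST", "/_async_search");
    submit.addParameter("ccs_minimize_roundtrips", "true"); // spec default is false
    submit.addParameter("rest_total_hits_as_int", "true");  // render hits.total as a plain integer
    // pre_filter_shard_size is fixed at 1 for async search, so it is not set here.
    submit.setJsonEntity("{\"query\":{\"match_all\":{}}}");
    Response response = restClient.performRequest(submit);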
+import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; @@ -25,15 +26,34 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import java.util.Set; + @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { - public static final S3HttpFixture s3Fixture = new S3HttpFixture(); - public static final S3HttpFixtureWithSessionToken s3HttpFixtureWithSessionToken = new S3HttpFixtureWithSessionToken(); - public static final S3HttpFixtureWithEC2 s3Ec2 = new S3HttpFixtureWithEC2(); + private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed"))); + private static final String TEMPORARY_SESSION_TOKEN = "session_token-" + HASHED_SEED; + private static final String IMDS_ACCESS_KEY = "imds-access-key-" + HASHED_SEED; + private static final String IMDS_SESSION_TOKEN = "imds-session-token-" + HASHED_SEED; + + private static final S3HttpFixture s3Fixture = new S3HttpFixture(); + + private static final S3HttpFixtureWithSessionToken s3HttpFixtureWithSessionToken = new S3HttpFixtureWithSessionToken( + "session_token_bucket", + "session_token_base_path_integration_tests", + System.getProperty("s3TemporaryAccessKey"), + TEMPORARY_SESSION_TOKEN + ); + + private static final S3HttpFixtureWithSessionToken s3HttpFixtureWithImdsSessionToken = new S3HttpFixtureWithSessionToken( + "ec2_bucket", + "ec2_base_path", + IMDS_ACCESS_KEY, + IMDS_SESSION_TOKEN + ); - private static final String s3TemporarySessionToken = "session_token"; + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(IMDS_ACCESS_KEY, IMDS_SESSION_TOKEN, Set.of()); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") @@ -41,15 +61,19 @@ public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3Clien .keystore("s3.client.integration_test_permanent.secret_key", System.getProperty("s3PermanentSecretKey")) .keystore("s3.client.integration_test_temporary.access_key", System.getProperty("s3TemporaryAccessKey")) .keystore("s3.client.integration_test_temporary.secret_key", System.getProperty("s3TemporarySecretKey")) - .keystore("s3.client.integration_test_temporary.session_token", s3TemporarySessionToken) + .keystore("s3.client.integration_test_temporary.session_token", TEMPORARY_SESSION_TOKEN) .setting("s3.client.integration_test_permanent.endpoint", s3Fixture::getAddress) .setting("s3.client.integration_test_temporary.endpoint", s3HttpFixtureWithSessionToken::getAddress) - .setting("s3.client.integration_test_ec2.endpoint", s3Ec2::getAddress) - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", s3Ec2::getAddress) + .setting("s3.client.integration_test_ec2.endpoint", s3HttpFixtureWithImdsSessionToken::getAddress) + .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) .build(); @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(s3Ec2).around(s3HttpFixtureWithSessionToken).around(cluster); + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture) + 
.around(s3HttpFixtureWithSessionToken) + .around(s3HttpFixtureWithImdsSessionToken) + .around(ec2ImdsHttpFixture) + .around(cluster); @ParametersFactory public static Iterable parameters() throws Exception { diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java index fa21797540c17..a522c9b17145b 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java @@ -9,28 +9,48 @@ package org.elasticsearch.repositories.s3; -import fixture.s3.S3HttpFixtureWithECS; +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.s3.S3HttpFixtureWithSessionToken; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import java.util.Set; + public class RepositoryS3EcsClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { - private static final S3HttpFixtureWithECS s3Ecs = new S3HttpFixtureWithECS(); + + private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed"))); + private static final String ECS_ACCESS_KEY = "ecs-access-key-" + HASHED_SEED; + private static final String ECS_SESSION_TOKEN = "ecs-session-token-" + HASHED_SEED; + + private static final S3HttpFixtureWithSessionToken s3Fixture = new S3HttpFixtureWithSessionToken( + "ecs_bucket", + "ecs_base_path", + ECS_ACCESS_KEY, + ECS_SESSION_TOKEN + ); + + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + ECS_ACCESS_KEY, + ECS_SESSION_TOKEN, + Set.of("/ecs_credentials_endpoint") + ); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") - .setting("s3.client.integration_test_ecs.endpoint", s3Ecs::getAddress) - .environment("AWS_CONTAINER_CREDENTIALS_FULL_URI", () -> (s3Ecs.getAddress() + "/ecs_credentials_endpoint")) + .setting("s3.client.integration_test_ecs.endpoint", s3Fixture::getAddress) + .environment("AWS_CONTAINER_CREDENTIALS_FULL_URI", () -> ec2ImdsHttpFixture.getAddress() + "/ecs_credentials_endpoint") .build(); @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(s3Ecs).around(cluster); + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(ec2ImdsHttpFixture).around(cluster); @ParametersFactory public static Iterable parameters() throws Exception { diff --git a/settings.gradle b/settings.gradle index 333f8272447c2..7bf03263031f1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -87,6 +87,7 @@ List projects = [ 'server', 'test:framework', 'test:fixtures:azure-fixture', + 'test:fixtures:ec2-imds-fixture', 'test:fixtures:gcs-fixture', 'test:fixtures:hdfs-fixture', 'test:fixtures:krb5kdc-fixture', diff --git a/test/fixtures/ec2-imds-fixture/build.gradle b/test/fixtures/ec2-imds-fixture/build.gradle new file mode 100644 index 0000000000000..7ad194acbb8fd --- /dev/null +++ 
b/test/fixtures/ec2-imds-fixture/build.gradle @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +apply plugin: 'elasticsearch.java' + +description = 'Fixture for emulating the Instance Metadata Service (IMDS) running in AWS EC2' + +dependencies { + api project(':server') + api("junit:junit:${versions.junit}") { + transitive = false + } + api project(':test:framework') +} diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java new file mode 100644 index 0000000000000..68f46d778018c --- /dev/null +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package fixture.aws.imds; + +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import org.junit.rules.ExternalResource; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Objects; +import java.util.Set; + +public class Ec2ImdsHttpFixture extends ExternalResource { + + private HttpServer server; + + private final String accessKey; + private final String sessionToken; + private final Set alternativeCredentialsEndpoints; + + public Ec2ImdsHttpFixture(String accessKey, String sessionToken, Set alternativeCredentialsEndpoints) { + this.accessKey = accessKey; + this.sessionToken = sessionToken; + this.alternativeCredentialsEndpoints = alternativeCredentialsEndpoints; + } + + protected HttpHandler createHandler() { + return new Ec2ImdsHttpHandler(accessKey, sessionToken, alternativeCredentialsEndpoints); + } + + public String getAddress() { + return "http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort(); + } + + public void stop(int delay) { + server.stop(delay); + } + + protected void before() throws Throwable { + server = HttpServer.create(resolveAddress(), 0); + server.createContext("/", Objects.requireNonNull(createHandler())); + server.start(); + } + + @Override + protected void after() { + stop(0); + } + + private static InetSocketAddress resolveAddress() { + try { + return new InetSocketAddress(InetAddress.getByName("localhost"), 0); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + } +} diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java new file mode 100644 index 0000000000000..04e5e83bddfa9 --- /dev/null +++ 
b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package fixture.aws.imds; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Clock; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Collection; +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.test.ESTestCase.randomIdentifier; + +/** + * Minimal HTTP handler that emulates the EC2 IMDS server + */ +@SuppressForbidden(reason = "this test uses a HttpServer to emulate the EC2 IMDS endpoint") +public class Ec2ImdsHttpHandler implements HttpHandler { + + private static final String IMDS_SECURITY_CREDENTIALS_PATH = "/latest/meta-data/iam/security-credentials/"; + + private final String accessKey; + private final String sessionToken; + private final Set validCredentialsEndpoints = ConcurrentCollections.newConcurrentSet(); + + public Ec2ImdsHttpHandler(String accessKey, String sessionToken, Collection alternativeCredentialsEndpoints) { + this.accessKey = Objects.requireNonNull(accessKey); + this.sessionToken = Objects.requireNonNull(sessionToken); + this.validCredentialsEndpoints.addAll(alternativeCredentialsEndpoints); + } + + @Override + public void handle(final HttpExchange exchange) throws IOException { + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + + try (exchange) { + final var path = exchange.getRequestURI().getPath(); + final var requestMethod = exchange.getRequestMethod(); + + if ("PUT".equals(requestMethod) && "/latest/api/token".equals(path)) { + // Reject IMDSv2 probe + exchange.sendResponseHeaders(RestStatus.METHOD_NOT_ALLOWED.getStatus(), -1); + return; + } + + if ("GET".equals(requestMethod)) { + if (path.equals(IMDS_SECURITY_CREDENTIALS_PATH)) { + final var profileName = randomIdentifier(); + validCredentialsEndpoints.add(IMDS_SECURITY_CREDENTIALS_PATH + profileName); + final byte[] response = profileName.getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", "text/plain"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + return; + } else if (validCredentialsEndpoints.contains(path)) { + final byte[] response = Strings.format( + """ + { + "AccessKeyId": "%s", + "Expiration": "%s", + "RoleArn": "%s", + "SecretAccessKey": "%s", + "Token": "%s" + }""", + accessKey, + ZonedDateTime.now(Clock.systemUTC()).plusDays(1L).format(DateTimeFormatter.ISO_DATE_TIME), + randomIdentifier(), + randomIdentifier(), + sessionToken + ).getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", 
"application/json"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + return; + } + } + + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("not supported: " + requestMethod + " " + path)); + } + } +} diff --git a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java new file mode 100644 index 0000000000000..5d5cbfae3fa60 --- /dev/null +++ b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws.imds; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpContext; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpPrincipal; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.URI; +import java.util.Set; + +public class Ec2ImdsHttpHandlerTests extends ESTestCase { + + public void testImdsV1() throws IOException { + final var accessKey = randomIdentifier(); + final var sessionToken = randomIdentifier(); + + final var handler = new Ec2ImdsHttpHandler(accessKey, sessionToken, Set.of()); + + final var roleResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/"); + assertEquals(RestStatus.OK, roleResponse.status()); + final var profileName = roleResponse.body().utf8ToString(); + assertTrue(Strings.hasText(profileName)); + + final var credentialsResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/" + profileName); + assertEquals(RestStatus.OK, credentialsResponse.status()); + + final var responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), credentialsResponse.body().streamInput(), false); + assertEquals(Set.of("AccessKeyId", "Expiration", "RoleArn", "SecretAccessKey", "Token"), responseMap.keySet()); + assertEquals(accessKey, responseMap.get("AccessKeyId")); + assertEquals(sessionToken, responseMap.get("Token")); + } + + public void testImdsV2Disabled() { + assertEquals( + RestStatus.METHOD_NOT_ALLOWED, + handleRequest(new Ec2ImdsHttpHandler(randomIdentifier(), randomIdentifier(), Set.of()), "PUT", "/latest/api/token").status() + ); + } + + private record TestHttpResponse(RestStatus status, BytesReference body) {} + + private static TestHttpResponse handleRequest(Ec2ImdsHttpHandler handler, String method, String uri) { + final var httpExchange = new TestHttpExchange(method, uri, BytesArray.EMPTY, 
TestHttpExchange.EMPTY_HEADERS); + try { + handler.handle(httpExchange); + } catch (IOException e) { + fail(e); + } + assertNotEquals(0, httpExchange.getResponseCode()); + return new TestHttpResponse(RestStatus.fromCode(httpExchange.getResponseCode()), httpExchange.getResponseBodyContents()); + } + + private static class TestHttpExchange extends HttpExchange { + + private static final Headers EMPTY_HEADERS = new Headers(); + + private final String method; + private final URI uri; + private final BytesReference requestBody; + private final Headers requestHeaders; + + private final Headers responseHeaders = new Headers(); + private final BytesStreamOutput responseBody = new BytesStreamOutput(); + private int responseCode; + + TestHttpExchange(String method, String uri, BytesReference requestBody, Headers requestHeaders) { + this.method = method; + this.uri = URI.create(uri); + this.requestBody = requestBody; + this.requestHeaders = requestHeaders; + } + + @Override + public Headers getRequestHeaders() { + return requestHeaders; + } + + @Override + public Headers getResponseHeaders() { + return responseHeaders; + } + + @Override + public URI getRequestURI() { + return uri; + } + + @Override + public String getRequestMethod() { + return method; + } + + @Override + public HttpContext getHttpContext() { + return null; + } + + @Override + public void close() {} + + @Override + public InputStream getRequestBody() { + try { + return requestBody.streamInput(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + + @Override + public OutputStream getResponseBody() { + return responseBody; + } + + @Override + public void sendResponseHeaders(int rCode, long responseLength) { + this.responseCode = rCode; + } + + @Override + public InetSocketAddress getRemoteAddress() { + return null; + } + + @Override + public int getResponseCode() { + return responseCode; + } + + public BytesReference getResponseBodyContents() { + return responseBody.bytes(); + } + + @Override + public InetSocketAddress getLocalAddress() { + return null; + } + + @Override + public String getProtocol() { + return "HTTP/1.1"; + } + + @Override + public Object getAttribute(String name) { + return null; + } + + @Override + public void setAttribute(String name, Object value) { + fail("setAttribute not implemented"); + } + + @Override + public void setStreams(InputStream i, OutputStream o) { + fail("setStreams not implemented"); + } + + @Override + public HttpPrincipal getPrincipal() { + fail("getPrincipal not implemented"); + throw new UnsupportedOperationException("getPrincipal not implemented"); + } + } + +} diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithEC2.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithEC2.java deleted file mode 100644 index d7048cbea6b8a..0000000000000 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithEC2.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ -package fixture.s3; - -import com.sun.net.httpserver.HttpHandler; - -import org.elasticsearch.rest.RestStatus; - -import java.nio.charset.StandardCharsets; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.Locale; - -public class S3HttpFixtureWithEC2 extends S3HttpFixtureWithSessionToken { - - private static final String EC2_PATH = "/latest/meta-data/iam/security-credentials/"; - private static final String EC2_PROFILE = "ec2Profile"; - - public S3HttpFixtureWithEC2() { - this(true); - } - - public S3HttpFixtureWithEC2(boolean enabled) { - this(enabled, "ec2_bucket", "ec2_base_path", "ec2_access_key", "ec2_session_token"); - } - - public S3HttpFixtureWithEC2(boolean enabled, String bucket, String basePath, String accessKey, String sessionToken) { - super(enabled, bucket, basePath, accessKey, sessionToken); - } - - @Override - protected HttpHandler createHandler() { - final HttpHandler delegate = super.createHandler(); - - return exchange -> { - final String path = exchange.getRequestURI().getPath(); - // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html - if ("GET".equals(exchange.getRequestMethod()) && path.startsWith(EC2_PATH)) { - if (path.equals(EC2_PATH)) { - final byte[] response = EC2_PROFILE.getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "text/plain"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); - exchange.close(); - return; - - } else if (path.equals(EC2_PATH + EC2_PROFILE)) { - final byte[] response = buildCredentialResponse(accessKey, sessionToken).getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "application/json"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); - exchange.close(); - return; - } - - final byte[] response = "unknown profile".getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "text/plain"); - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), response.length); - exchange.getResponseBody().write(response); - exchange.close(); - return; - - } - delegate.handle(exchange); - }; - } - - protected static String buildCredentialResponse(final String ec2AccessKey, final String ec2SessionToken) { - return String.format(Locale.ROOT, """ - { - "AccessKeyId": "%s", - "Expiration": "%s", - "RoleArn": "arn", - "SecretAccessKey": "secret_access_key", - "Token": "%s" - }""", ec2AccessKey, ZonedDateTime.now().plusDays(1L).format(DateTimeFormatter.ISO_DATE_TIME), ec2SessionToken); - } -} diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithECS.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithECS.java deleted file mode 100644 index d6266ea75dd3a..0000000000000 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithECS.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ -package fixture.s3; - -import com.sun.net.httpserver.HttpHandler; - -import org.elasticsearch.rest.RestStatus; - -import java.nio.charset.StandardCharsets; - -public class S3HttpFixtureWithECS extends S3HttpFixtureWithEC2 { - - public S3HttpFixtureWithECS() { - this(true); - } - - public S3HttpFixtureWithECS(boolean enabled) { - this(enabled, "ecs_bucket", "ecs_base_path", "ecs_access_key", "ecs_session_token"); - } - - public S3HttpFixtureWithECS(boolean enabled, String bucket, String basePath, String accessKey, String sessionToken) { - super(enabled, bucket, basePath, accessKey, sessionToken); - } - - @Override - protected HttpHandler createHandler() { - final HttpHandler delegate = super.createHandler(); - - return exchange -> { - // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html - if ("GET".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getPath().equals("/ecs_credentials_endpoint")) { - final byte[] response = buildCredentialResponse(accessKey, sessionToken).getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "application/json"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); - exchange.close(); - return; - } - delegate.handle(exchange); - }; - } -} diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java index 1a1cbba651e06..001cc34d9b20d 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java @@ -18,16 +18,8 @@ public class S3HttpFixtureWithSessionToken extends S3HttpFixture { protected final String sessionToken; - public S3HttpFixtureWithSessionToken() { - this(true); - } - - public S3HttpFixtureWithSessionToken(boolean enabled) { - this(enabled, "session_token_bucket", "session_token_base_path_integration_tests", "session_token_access_key", "session_token"); - } - - public S3HttpFixtureWithSessionToken(boolean enabled, String bucket, String basePath, String accessKey, String sessionToken) { - super(enabled, bucket, basePath, accessKey); + public S3HttpFixtureWithSessionToken(String bucket, String basePath, String accessKey, String sessionToken) { + super(true, bucket, basePath, accessKey); this.sessionToken = sessionToken; } From 79d8eb51b4f87276a5c9259af46b635d42a75058 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 25 Nov 2024 09:30:53 +0000 Subject: [PATCH 210/386] Rename `RepositoryS3RestIT` (#117449) This test suite is less generic than its current name suggests. 
Relates ES-9984 --- ...stIT.java => RepositoryS3RestReloadCredentialsIT.java} | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) rename modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/{RepositoryS3RestIT.java => RepositoryS3RestReloadCredentialsIT.java} (89%) diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java similarity index 89% rename from modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java rename to modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java index dcd29c6d26c6e..2f3e995b52468 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -28,10 +29,11 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; -public class RepositoryS3RestIT extends ESRestTestCase { +public class RepositoryS3RestReloadCredentialsIT extends ESRestTestCase { - private static final String BUCKET = "RepositoryS3JavaRestTest-bucket"; - private static final String BASE_PATH = "RepositoryS3JavaRestTest-base-path"; + private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed"))); + private static final String BUCKET = "RepositoryS3RestReloadCredentialsIT-bucket-" + HASHED_SEED; + private static final String BASE_PATH = "RepositoryS3RestReloadCredentialsIT-base-path-" + HASHED_SEED; public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored"); From fbc6abec0552a29b99675800ed134468910fb9f1 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 25 Nov 2024 11:32:28 +0200 Subject: [PATCH 211/386] [TEST] Unmute randomized logsdb test (#117450) Test fixed in #117228 and here. 
Fixes #116536 Fixes #117212 --- muted-tests.yml | 6 ------ .../qa/StandardVersusLogsIndexModeChallengeRestIT.java | 10 +++++----- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0d2e6b991a5c3..d4b77f5269c10 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -168,9 +168,6 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsCanMatchOnCoordinatorIntegTests method: testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange issue: https://github.com/elastic/elasticsearch/issues/116523 -- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsIndexModeRandomDataDynamicMappingChallengeRestIT - method: testMatchAllQuery - issue: https://github.com/elastic/elasticsearch/issues/116536 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT method: test {categorize.Categorize} issue: https://github.com/elastic/elasticsearch/issues/116434 @@ -208,9 +205,6 @@ tests: - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT method: testMultipleInferencesTriggeringDownloadAndDeploy issue: https://github.com/elastic/elasticsearch/issues/117208 -- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT - method: testEsqlSource - issue: https://github.com/elastic/elasticsearch/issues/117212 - class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT method: testEnterpriseDownloaderTask issue: https://github.com/elastic/elasticsearch/issues/115163 diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 8930ff23fb3b0..e411f2f3f314d 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -181,7 +181,7 @@ protected static void waitForLogs(RestClient client) throws Exception { } public void testMatchAllQuery() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -199,7 +199,7 @@ public void testMatchAllQuery() throws IOException { } public void testTermsQuery() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -217,7 +217,7 @@ public void testTermsQuery() throws IOException { } public void testHistogramAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -235,7 +235,7 @@ public void testHistogramAggregation() throws IOException { } public void testTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -253,7 +253,7 @@ public void 
testTermsAggregation() throws IOException { } public void testDateHistogramAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); From 1d4c8d85f6641f8f4efa776106392b0eb6980406 Mon Sep 17 00:00:00 2001 From: Luke Whiting Date: Mon, 25 Nov 2024 09:51:11 +0000 Subject: [PATCH 212/386] (#34659) - Add Timezone Configuration to Watcher (#117033) * Add timezone support to Cron objects * Add timezone support to CronnableSchedule * XContent change to support parsing and display of TimeZone fields on schedules * Case insensitive timezone parsing * Doc changes * YAML REST tests * Equals, toString and HashCode now include timezone * Additional random testing for DST transitions * Migrate Cron class to use wrapped LocalDateTime The algorithm depends on some quirks of calendar but LocalDateTime correctly ignores DST during calculations so this uses a LocalDateTime with a wrapper to emulate some of Calendar's behaviours that the Cron algorithm depends on * Additional documentation to explain discontinuity event behaviour * Remove redundant conversions from ZoneId to TimeZone following move to LocalDateTime * Add documentation warning that manual clock changes will cause unpredictable watch execution * Update docs/reference/watcher/trigger/schedule.asciidoc Co-authored-by: Lee Hinman --------- Co-authored-by: Lee Hinman --- .../watching-time-series-data.asciidoc | 8 +- .../watcher/trigger/schedule.asciidoc | 33 +- .../watcher/trigger/schedule/cron.asciidoc | 42 ++- .../watcher/trigger/schedule/daily.asciidoc | 24 ++ .../watcher/trigger/schedule/monthly.asciidoc | 24 +- .../watcher/trigger/schedule/weekly.asciidoc | 24 +- .../watcher/trigger/schedule/yearly.asciidoc | 23 ++ .../xpack/core/scheduler/Cron.java | 287 +++++++++--------- .../scheduler/LocalDateTimeLegacyWrapper.java | 130 ++++++++ .../core/scheduler/CronTimezoneTests.java | 231 ++++++++++++++ .../connector/ConnectorCustomSchedule.java | 2 +- .../connector/ConnectorScheduling.java | 2 +- .../slm/SnapshotLifecyclePolicyTests.java | 4 +- .../put_watch/11_timezoned_schedules.yml | 121 ++++++++ .../trigger/schedule/CronnableSchedule.java | 34 ++- .../trigger/schedule/ScheduleRegistry.java | 40 ++- .../trigger/schedule/ScheduleTrigger.java | 9 +- .../schedule/support/TimezoneUtils.java | 55 ++++ .../schedule/ScheduleRegistryTests.java | 15 +- .../schedule/support/TimezoneUtilsTests.java | 40 +++ 20 files changed, 975 insertions(+), 173 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/LocalDateTimeLegacyWrapper.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/CronTimezoneTests.java create mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/11_timezoned_schedules.yml create mode 100644 x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtils.java create mode 100644 x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtilsTests.java diff --git a/docs/reference/watcher/example-watches/watching-time-series-data.asciidoc b/docs/reference/watcher/example-watches/watching-time-series-data.asciidoc index 421c69619cfea..b1c776baae1de 100644 --- 
a/docs/reference/watcher/example-watches/watching-time-series-data.asciidoc
+++ b/docs/reference/watcher/example-watches/watching-time-series-data.asciidoc
@@ -62,20 +62,20 @@ contain the words "error" or "problem".
 
 To set up the watch:
 
-. Define the watch trigger--a daily schedule that runs at 12:00 UTC:
+. Define the watch trigger--a daily schedule that runs at 12:00 Australian Eastern Standard Time (UTC+10:00):
+
 [source,js]
 --------------------------------------------------
 "trigger" : {
   "schedule" : {
+    "timezone": "Australia/Brisbane",
     "daily" : {
       "at" : "12:00"
     }
   }
 }
 --------------------------------------------------
+
-NOTE: In {watcher}, you specify times in UTC time. Don't forget to do the
-      conversion from your local time so the schedule triggers at the time
-      you intend.
+NOTE: In {watcher}, if the timezone is omitted then schedules default to UTC. `timezone` can be specified either
+as a +/-HH:mm offset from UTC or as a timezone name from the machine's local IANA Time Zone Database.
 
 . Define the watch input--a search that uses a filter to constrain the results
   to the past day.
diff --git a/docs/reference/watcher/trigger/schedule.asciidoc b/docs/reference/watcher/trigger/schedule.asciidoc
index fa389409d15c4..d2bf466644e10 100644
--- a/docs/reference/watcher/trigger/schedule.asciidoc
+++ b/docs/reference/watcher/trigger/schedule.asciidoc
@@ -6,12 +6,42 @@
 ++++
 
 Schedule <> define when the watch execution should start based
-on date and time. All times are specified in UTC time.
+on date and time. All times are in UTC time unless a timezone is explicitly specified
+in the schedule.
 
 {watcher} uses the system clock to determine the current time. To ensure schedules
 are triggered when expected, you should synchronize the clocks of all nodes in the
 cluster using a time service such as http://www.ntp.org/[NTP].
 
+NOTE: {watcher} can't correct for manual adjustments to the system clock. Be aware when making
+such changes that watch execution may be affected with watches being skipped or repeated if the
+adjustment covers their target execution time. This applies to changes made via NTP as well.
+
+When specifying a timezone for a watch, keep in mind the effect daylight savings time
+transitions may have on the schedule, especially if the watch is scheduled to run
+during the transition. Here's how {watcher} handles watches scheduled during discontinuities:
+
+==== Gap Transitions
+These occur when the clock moves forward, such as when daylight savings time starts,
+and cause certain hours or minutes to be skipped. If your watch is scheduled to run
+during a gap transition, the watch is executed at the same time as before the transition.
+
+Example: If a watch is scheduled to run daily at 1:30AM in the `Europe/London` time zone and
+the clock moves forward one hour from 1:00AM (GMT+0) to 2:00AM (GMT+1), the watch is executed
+at 2:30AM (GMT+1) which would have been 1:30AM before the transition. Subsequent executions
+happen at 1:30AM (GMT+1).
+
+==== Overlap Transitions
+These occur when the clock moves backward, such as when daylight savings time ends,
+and cause certain hours or minutes to be repeated. If your watch is scheduled to run
+during an overlap transition, only the first occurrence of the time causes the watch
+to execute, with the second being skipped.
+
+Example: If a watch is scheduled to run at 1:30 AM and the clock moves backward one hour
+from 2:00AM to 1:00AM, the watch is executed at 1:30AM and the second occurrence after the
+change is skipped.
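
The two behaviours above follow from how `java.time` resolves nonexistent and ambiguous local times, which is what the scheduler relies on when it converts a computed local run time back to an instant (note the patch's own `@SuppressForbidden` reason on `getNextValidTimeAfter` and the final `atZone` conversion). The following standalone sketch is illustrative only -- the class name and the 2024 `Europe/London` transition dates are chosen here to mirror the examples above and are not part of any watch definition or of this patch:

[source,java]
--------------------------------------------------
import java.time.LocalDateTime;
import java.time.ZoneId;

public class DstTransitionDemo {
    public static void main(String[] args) {
        ZoneId london = ZoneId.of("Europe/London");

        // Gap: on 2024-03-31 the clock jumps from 01:00 GMT+0 to 02:00 GMT+1, so 01:30 never occurs.
        // atZone() shifts the local time forward by the length of the gap, matching the gap behaviour above.
        System.out.println(LocalDateTime.of(2024, 3, 31, 1, 30).atZone(london));
        // -> 2024-03-31T02:30+01:00[Europe/London]

        // Overlap: on 2024-10-27 the clock falls back from 02:00 GMT+1 to 01:00 GMT+0, so 01:30 occurs twice.
        // atZone() picks the earlier offset, i.e. the first occurrence; the repeat is never selected.
        System.out.println(LocalDateTime.of(2024, 10, 27, 1, 30).atZone(london));
        // -> 2024-10-27T01:30+01:00[Europe/London]
    }
}
--------------------------------------------------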
+
+=== Throttling
 Keep in mind that the throttle period can affect when a watch is actually executed.
 The default throttle period is five seconds (5000 ms). If you configure a schedule
 that's more frequent than the throttle period, the throttle period overrides the
@@ -20,6 +50,7 @@
 and set the schedule to every 10 seconds, the watch is executed no more than
 once per minute. For more information about throttling, see
 <>.
 
+=== Schedule Types
 {watcher} provides several types of schedule triggers:
 
 * <>
diff --git a/docs/reference/watcher/trigger/schedule/cron.asciidoc b/docs/reference/watcher/trigger/schedule/cron.asciidoc
index 673f350435c5f..c33bf524a8737 100644
--- a/docs/reference/watcher/trigger/schedule/cron.asciidoc
+++ b/docs/reference/watcher/trigger/schedule/cron.asciidoc
@@ -5,14 +5,14 @@
 ++++
 
-Defines a <> using a <>
+Defines a <> using a <>
 that specifies when to execute a watch.
 
-TIP: While cron expressions are powerful, a regularly occurring schedule
-is easier to configure with the other schedule types.
-If you must use a cron schedule, make sure you verify it with
-<> .
+TIP: While cron expressions are powerful, a regularly occurring schedule
+is easier to configure with the other schedule types.
+If you must use a cron schedule, make sure you verify it with
+<> .
 
 ===== Configure a cron schedule with one time
 
@@ -60,16 +60,40 @@ minute during the weekend:
 --------------------------------------------------
 // NOTCONSOLE
 
+[[configure_cron_time-zone]]
+==== Use a different time zone for a cron schedule
+By default, cron expressions are evaluated in the UTC time zone. To use a different time zone,
+you can specify the `timezone` parameter in the schedule. For example, the following
+`cron` schedule triggers at 6:00 AM and 6:00 PM during weekends in the `America/Los_Angeles` time zone:
+
+
+[source,js]
+--------------------------------------------------
+{
+  ...
+  "trigger" : {
+    "schedule" : {
+      "timezone" : "America/Los_Angeles",
+      "cron" : [
+        "0 6,18 * * * SAT-SUN"
+      ]
+    }
+  }
+  ...
+}
+--------------------------------------------------
+// NOTCONSOLE
+
 [[croneval]]
 ===== Use croneval to validate cron expressions
 
-{es} provides a <> command line tool
-in the `$ES_HOME/bin` directory that you can use to check that your cron expressions
+{es} provides a <> command line tool
+in the `$ES_HOME/bin` directory that you can use to check that your cron expressions
 are valid and produce the expected results.
 
-To validate a cron expression, pass it in as a parameter to `elasticsearch-croneval`:
+To validate a cron expression, pass it in as a parameter to `elasticsearch-croneval`:
 
 [source,bash]
 --------------------------------------------------
 bin/elasticsearch-croneval "0 0/1 * * * ?"
----------------------------------------------------
+--------------------------------------------------
diff --git a/docs/reference/watcher/trigger/schedule/daily.asciidoc b/docs/reference/watcher/trigger/schedule/daily.asciidoc
index cea2b8316e02f..d258d9c612350 100644
--- a/docs/reference/watcher/trigger/schedule/daily.asciidoc
+++ b/docs/reference/watcher/trigger/schedule/daily.asciidoc
@@ -97,3 +97,27 @@ or minutes as an array. For example, following `daily` schedule triggers at
 }
 --------------------------------------------------
 // NOTCONSOLE
+
+[[specifying-time-zone-for-daily-schedule]]
+===== Specifying a time zone for a daily schedule
+By default, daily schedules are evaluated in the UTC time zone. To use a different time zone,
+you can specify the `timezone` parameter in the schedule. For example, the following
+`daily` schedule triggers at 6:00 AM and 6:00 PM in the `Pacific/Galapagos` time zone:
+
+[source,js]
+--------------------------------------------------
+{
+  "trigger" : {
+    "schedule" : {
+      "timezone" : "Pacific/Galapagos",
+      "daily" : {
+        "at" : {
+          "hour" : [ 6, 18 ],
+          "minute" : 0
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
diff --git a/docs/reference/watcher/trigger/schedule/monthly.asciidoc b/docs/reference/watcher/trigger/schedule/monthly.asciidoc
index 7d13262ed2fa8..694c76aaee23a 100644
--- a/docs/reference/watcher/trigger/schedule/monthly.asciidoc
+++ b/docs/reference/watcher/trigger/schedule/monthly.asciidoc
@@ -74,4 +74,26 @@ schedule triggers at 12:00 AM and 12:00 PM on the 10th and 20th of each month.
 }
 }
 --------------------------------------------------
-// NOTCONSOLE
\ No newline at end of file
+// NOTCONSOLE
+
+==== Configuring time zones for monthly schedules
+By default, monthly schedules are evaluated in the UTC time zone. To use a different
+time zone, you can specify the `timezone` parameter in the schedule. For example,
+the following `monthly` schedule triggers at 6:00 AM and 6:00 PM on the 15th of each month in
+the `Asia/Tokyo` time zone:
+
+[source,js]
+--------------------------------------------------
+{
+  "trigger" : {
+    "schedule" : {
+      "timezone" : "Asia/Tokyo",
+      "monthly" : {
+        "on" : [ 15 ],
+        "at" : [ "6:00", "18:00" ]
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
diff --git a/docs/reference/watcher/trigger/schedule/weekly.asciidoc b/docs/reference/watcher/trigger/schedule/weekly.asciidoc
index 5b43de019ad25..53bd2f3167b21 100644
--- a/docs/reference/watcher/trigger/schedule/weekly.asciidoc
+++ b/docs/reference/watcher/trigger/schedule/weekly.asciidoc
@@ -79,4 +79,26 @@ Alternatively, you can specify days and times in an object that has `on` and
 }
 }
 --------------------------------------------------
-// NOTCONSOLE
\ No newline at end of file
+// NOTCONSOLE
+
+==== Use a different time zone for a weekly schedule
+By default, weekly schedules are evaluated in the UTC time zone. To use a different time zone,
+you can specify the `timezone` parameter in the schedule. For example, the following
+`weekly` schedule triggers at 6:00 AM and 6:00 PM on Tuesdays and Fridays in the
+`America/Buenos_Aires` time zone:
+
+[source,js]
+--------------------------------------------------
+{
+  "trigger" : {
+    "schedule" : {
+      "timezone" : "America/Buenos_Aires",
+      "weekly" : {
+        "on" : [ "tuesday", "friday" ],
+        "at" : [ "6:00", "18:00" ]
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
diff --git a/docs/reference/watcher/trigger/schedule/yearly.asciidoc b/docs/reference/watcher/trigger/schedule/yearly.asciidoc
index 8fce024bf9f4a..c33321ef5a7dc 100644
--- a/docs/reference/watcher/trigger/schedule/yearly.asciidoc
+++ b/docs/reference/watcher/trigger/schedule/yearly.asciidoc
@@ -88,3 +88,26 @@ January 20th, December 10th, and December 20th.
 }
 --------------------------------------------------
 // NOTCONSOLE
+
+==== Configuring a yearly schedule with a different time zone
+By default, the `yearly` schedule is evaluated in the UTC time zone. To use a
+different time zone, you can specify the `timezone` parameter in the schedule.
+For example, the following `yearly` schedule triggers at 3:30 PM and 8:30 PM
+on June 4th in the `Antarctica/Troll` time zone:
+
+[source,js]
+--------------------------------------------------
+{
+  "trigger" : {
+    "schedule" : {
+      "timezone" : "Antarctica/Troll",
+      "yearly" : {
+        "in" : "june",
+        "on" : 4,
+        "at" : [ "15:30", "20:30" ]
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java
index b9d39aa665848..c94b90b6c0c23 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java
@@ -8,11 +8,15 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.xcontent.ToXContentFragment;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
 import java.time.ZoneOffset;
+import java.time.temporal.ChronoField;
 import java.util.Calendar;
 import java.util.Iterator;
 import java.util.Locale;
@@ -232,6 +236,8 @@ public class Cron implements ToXContentFragment {
 
     private final String expression;
 
+    private ZoneId timeZone;
+
     private transient TreeSet seconds;
     private transient TreeSet minutes;
     private transient TreeSet hours;
@@ -246,7 +252,20 @@ public class Cron implements ToXContentFragment {
     private transient boolean nearestWeekday = false;
     private transient int lastdayOffset = 0;
 
-    public static final int MAX_YEAR = Calendar.getInstance(UTC, Locale.ROOT).get(Calendar.YEAR) + 100;
+    // Restricted to 50 years as the tzdb only has correct DST transition information for countries using a lunar calendar
+    // for the next ~60 years
+    public static final int MAX_YEAR = Calendar.getInstance(UTC, Locale.ROOT).get(Calendar.YEAR) + 50;
+
+    public Cron(String expression, ZoneId timeZone) {
+        this.timeZone = timeZone;
+        assert expression != null : "cron expression cannot be null";
+        this.expression = expression.toUpperCase(Locale.ROOT);
+        try {
+            buildExpression(this.expression);
+        } catch (Exception e) {
+            throw illegalArgument("invalid cron expression [{}]", e, expression);
+        }
+    }
 
     /**
      * Constructs a new CronExpression based on the specified
@@ -259,13 +278,7 @@
      * CronExpression
     */
    public Cron(String expression) {
-        assert expression != null : "cron expression cannot be null";
-        this.expression = expression.toUpperCase(Locale.ROOT);
-        try {
-            buildExpression(this.expression);
-        } catch (Exception e) {
-            throw illegalArgument("invalid cron expression [{}]", e, expression);
-        }
+        this(expression, UTC.toZoneId());
    }
 
     /**
@@ -275,7 +288,11 @@ public Cron(String expression) {
      * @param cron The existing cron expression to be copied
     */
    public Cron(Cron cron) {
-        this(cron.expression);
+        this(cron.expression, cron.timeZone);
+    }
+
+    public void setTimeZone(ZoneId timeZone) {
+        this.timeZone = timeZone;
    }
 
     /**
@@ -286,31 +303,25 @@ public Cron(Cron cron) {
      * a time that is previous to the given time)
      * @return the next valid time (since the epoch)
     */
+    @SuppressForbidden(reason = "In this case, the DST ambiguity of the atZone method is desired, understood and tested")
    public long getNextValidTimeAfter(final long time) {
-        // Computation is based
on Gregorian year only. - Calendar cl = new java.util.GregorianCalendar(UTC, Locale.ROOT); - - // move ahead one second, since we're computing the time *after* the - // given time - final long afterTime = time + 1000; - // CronTrigger does not deal with milliseconds - cl.setTimeInMillis(afterTime); - cl.set(Calendar.MILLISECOND, 0); + LocalDateTime afterTimeLdt = LocalDateTime.ofInstant(java.time.Instant.ofEpochMilli(time), timeZone).plusSeconds(1); + LocalDateTimeLegacyWrapper cl = new LocalDateTimeLegacyWrapper(afterTimeLdt.with(ChronoField.MILLI_OF_SECOND, 0)); boolean gotOne = false; // loop until we've computed the next time, or we've past the endTime while (gotOne == false) { - if (cl.get(Calendar.YEAR) > 2999) { // prevent endless loop... + if (cl.getYear() > 2999) { // prevent endless loop... return -1; } SortedSet st = null; int t = 0; - int sec = cl.get(Calendar.SECOND); - int min = cl.get(Calendar.MINUTE); + int sec = cl.getSecond(); + int min = cl.getMinute(); // get second................................................. st = seconds.tailSet(sec); @@ -319,12 +330,12 @@ public long getNextValidTimeAfter(final long time) { } else { sec = seconds.first(); min++; - cl.set(Calendar.MINUTE, min); + cl.setMinute(min); } - cl.set(Calendar.SECOND, sec); + cl.setSecond(sec); - min = cl.get(Calendar.MINUTE); - int hr = cl.get(Calendar.HOUR_OF_DAY); + min = cl.getMinute(); + int hr = cl.getHour(); t = -1; // get minute................................................. @@ -337,15 +348,15 @@ public long getNextValidTimeAfter(final long time) { hr++; } if (min != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, min); - setCalendarHour(cl, hr); + cl.setSecond(0); + cl.setMinute(min); + cl.setHour(hr); continue; } - cl.set(Calendar.MINUTE, min); + cl.setMinute(min); - hr = cl.get(Calendar.HOUR_OF_DAY); - int day = cl.get(Calendar.DAY_OF_MONTH); + hr = cl.getHour(); + int day = cl.getDayOfMonth(); t = -1; // get hour................................................... 
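// The Calendar-to-wrapper conversions in the hunks above and below follow a fixed mapping. A
// minimal sketch of the contract they appear to assume is given here; the real implementation is
// the LocalDateTimeLegacyWrapper class added at the end of this patch, so this snippet is
// illustrative only and not the committed code. The wrapper keeps two Calendar quirks the
// algorithm depends on -- 0-based months and 1=Sunday..7=Saturday days of week -- while
// delegating to an immutable LocalDateTime, which (unlike Calendar in a DST-aware zone)
// applies no offset shifts during field arithmetic:
//
//     private LocalDateTime ldt;
//
//     public void setMonth(int month) {
//         ldt = ldt.withMonth(month + 1); // Calendar months are 0-based, java.time's are 1-based
//     }
//
//     public int getDayOfWeek() {
//         // ISO numbering (Monday=1..Sunday=7) -> Calendar numbering (Sunday=1..Saturday=7)
//         return ldt.getDayOfWeek().getValue() % 7 + 1;
//     }
//
//     public void plusYears(long years) {
//         ldt = ldt.plusYears(years); // replaces Calendar.add(Calendar.YEAR, n)
//     }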
@@ -358,16 +369,16 @@ public long getNextValidTimeAfter(final long time) { day++; } if (hr != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.DAY_OF_MONTH, day); - setCalendarHour(cl, hr); + cl.setSecond(0); + cl.setMinute(0); + cl.setDayOfMonth(day); + cl.setHour(hr); continue; } - cl.set(Calendar.HOUR_OF_DAY, hr); + cl.setHour(hr); - day = cl.get(Calendar.DAY_OF_MONTH); - int mon = cl.get(Calendar.MONTH) + 1; + day = cl.getDayOfMonth(); + int mon = cl.getMonth() + 1; // '+ 1' because calendar is 0-based for this field, and we are // 1-based t = -1; @@ -381,32 +392,32 @@ public long getNextValidTimeAfter(final long time) { if (lastdayOfMonth) { if (nearestWeekday == false) { t = day; - day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + day = getLastDayOfMonth(mon, cl.getYear()); day -= lastdayOffset; if (t > day) { mon++; if (mon > 12) { mon = 1; tmon = 3333; // ensure test of mon != tmon further below fails - cl.add(Calendar.YEAR, 1); + cl.plusYears(1); } day = 1; } } else { t = day; - day = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); + day = getLastDayOfMonth(mon, cl.getYear()); day -= lastdayOffset; - Calendar tcal = Calendar.getInstance(UTC, Locale.ROOT); - tcal.set(Calendar.SECOND, 0); - tcal.set(Calendar.MINUTE, 0); - tcal.set(Calendar.HOUR_OF_DAY, 0); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + LocalDateTimeLegacyWrapper tcal = new LocalDateTimeLegacyWrapper(LocalDateTime.now(timeZone)); + tcal.setSecond(0); + tcal.setMinute(0); + tcal.setHour(0); + tcal.setDayOfMonth(day); + tcal.setMonth(mon - 1); + tcal.setYear(cl.getYear()); - int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - int dow = tcal.get(Calendar.DAY_OF_WEEK); + int ldom = getLastDayOfMonth(mon, cl.getYear()); + int dow = tcal.getDayOfWeek(); if (dow == Calendar.SATURDAY && day == 1) { day += 2; @@ -418,13 +429,12 @@ public long getNextValidTimeAfter(final long time) { day += 1; } - tcal.set(Calendar.SECOND, sec); - tcal.set(Calendar.MINUTE, min); - tcal.set(Calendar.HOUR_OF_DAY, hr); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - long nTime = tcal.getTimeInMillis(); - if (nTime < afterTime) { + tcal.setSecond(sec); + tcal.setMinute(min); + tcal.setHour(hr); + tcal.setDayOfMonth(day); + tcal.setMonth(mon - 1); + if (tcal.isBefore(afterTimeLdt)) { day = 1; mon++; } @@ -433,16 +443,16 @@ public long getNextValidTimeAfter(final long time) { t = day; day = daysOfMonth.first(); - Calendar tcal = Calendar.getInstance(UTC, Locale.ROOT); - tcal.set(Calendar.SECOND, 0); - tcal.set(Calendar.MINUTE, 0); - tcal.set(Calendar.HOUR_OF_DAY, 0); - tcal.set(Calendar.DAY_OF_MONTH, day); - tcal.set(Calendar.MONTH, mon - 1); - tcal.set(Calendar.YEAR, cl.get(Calendar.YEAR)); + LocalDateTimeLegacyWrapper tcal = new LocalDateTimeLegacyWrapper(LocalDateTime.now(timeZone)); + tcal.setSecond(0); + tcal.setMinute(0); + tcal.setHour(0); + tcal.setDayOfMonth(day); + tcal.setMonth(mon - 1); + tcal.setYear(cl.getYear()); - int ldom = getLastDayOfMonth(mon, cl.get(Calendar.YEAR)); - int dow = tcal.get(Calendar.DAY_OF_WEEK); + int ldom = getLastDayOfMonth(mon, cl.getYear()); + int dow = tcal.getDayOfWeek(); if (dow == Calendar.SATURDAY && day == 1) { day += 2; @@ -454,13 +464,12 @@ public long getNextValidTimeAfter(final long time) { day += 1; } - tcal.set(Calendar.SECOND, sec); - tcal.set(Calendar.MINUTE, min); - tcal.set(Calendar.HOUR_OF_DAY, hr); - tcal.set(Calendar.DAY_OF_MONTH, day); 
-                        tcal.set(Calendar.MONTH, mon - 1);
-                        long nTime = tcal.getTimeInMillis();
-                        if (nTime < afterTime) {
+                        tcal.setSecond(sec);
+                        tcal.setMinute(min);
+                        tcal.setHour(hr);
+                        tcal.setDayOfMonth(day);
+                        tcal.setMonth(mon - 1);
+                        if (tcal.isBefore(afterTimeLdt)) {
                             day = daysOfMonth.first();
                             mon++;
                         }
@@ -468,7 +477,7 @@ public long getNextValidTimeAfter(final long time) {
                     t = day;
                     day = st.first();
                     // make sure we don't over-run a short month, such as february
-                    int lastDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR));
+                    int lastDay = getLastDayOfMonth(mon, cl.getYear());
                     if (day > lastDay) {
                         day = daysOfMonth.first();
                         mon++;
@@ -479,11 +488,11 @@ public long getNextValidTimeAfter(final long time) {
             }
 
             if (day != t || mon != tmon) {
-                cl.set(Calendar.SECOND, 0);
-                cl.set(Calendar.MINUTE, 0);
-                cl.set(Calendar.HOUR_OF_DAY, 0);
-                cl.set(Calendar.DAY_OF_MONTH, day);
-                cl.set(Calendar.MONTH, mon - 1);
+                cl.setSecond(0);
+                cl.setMinute(0);
+                cl.setHour(0);
+                cl.setDayOfMonth(day);
+                cl.setMonth(mon - 1);
                 // '- 1' because calendar is 0-based for this field, and we
                 // are 1-based
                 continue;
@@ -493,7 +502,7 @@ public long getNextValidTimeAfter(final long time) {
                 // the month?
                 int dow = daysOfWeek.first(); // desired
                 // d-o-w
-            int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w
+            int cDow = cl.getDayOfWeek(); // current d-o-w
                 int daysToAdd = 0;
                 if (cDow < dow) {
                     daysToAdd = dow - cDow;
@@ -502,15 +511,15 @@ public long getNextValidTimeAfter(final long time) {
                     daysToAdd = dow + (7 - cDow);
                 }
 
-            int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR));
+            int lDay = getLastDayOfMonth(mon, cl.getYear());
 
                 if (day + daysToAdd > lDay) { // did we already miss the
                     // last one?
-                cl.set(Calendar.SECOND, 0);
-                cl.set(Calendar.MINUTE, 0);
-                cl.set(Calendar.HOUR_OF_DAY, 0);
-                cl.set(Calendar.DAY_OF_MONTH, 1);
-                cl.set(Calendar.MONTH, mon);
+                cl.setSecond(0);
+                cl.setMinute(0);
+                cl.setHour(0);
+                cl.setDayOfMonth(1);
+                cl.setMonth(mon);
                     // no '- 1' here because we are promoting the month
                     continue;
                 }
@@ -523,11 +532,11 @@ public long getNextValidTimeAfter(final long time) {
                 day += daysToAdd;
 
                 if (daysToAdd > 0) {
-                    cl.set(Calendar.SECOND, 0);
-                    cl.set(Calendar.MINUTE, 0);
-                    cl.set(Calendar.HOUR_OF_DAY, 0);
-                    cl.set(Calendar.DAY_OF_MONTH, day);
-                    cl.set(Calendar.MONTH, mon - 1);
+                    cl.setSecond(0);
+                    cl.setMinute(0);
+                    cl.setHour(0);
+                    cl.setDayOfMonth(day);
+                    cl.setMonth(mon - 1);
                     // '- 1' here because we are not promoting the month
                     continue;
                 }
@@ -536,7 +545,7 @@ public long getNextValidTimeAfter(final long time) {
 
                 // are we looking for the Nth XXX day in the month?
                int dow = daysOfWeek.first(); // desired
                // d-o-w
-                int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w
+                int cDow = cl.getDayOfWeek(); // current d-o-w
                int daysToAdd = 0;
                if (cDow < dow) {
                    daysToAdd = dow - cDow;
@@ -557,25 +566,25 @@ public long getNextValidTimeAfter(final long time) {
                daysToAdd = (nthdayOfWeek - weekOfMonth) * 7;
                day += daysToAdd;
-                if (daysToAdd < 0 || day > getLastDayOfMonth(mon, cl.get(Calendar.YEAR))) {
-                    cl.set(Calendar.SECOND, 0);
-                    cl.set(Calendar.MINUTE, 0);
-                    cl.set(Calendar.HOUR_OF_DAY, 0);
-                    cl.set(Calendar.DAY_OF_MONTH, 1);
-                    cl.set(Calendar.MONTH, mon);
+                if (daysToAdd < 0 || day > getLastDayOfMonth(mon, cl.getYear())) {
+                    cl.setSecond(0);
+                    cl.setMinute(0);
+                    cl.setHour(0);
+                    cl.setDayOfMonth(1);
+                    cl.setMonth(mon);
                    // no '- 1' here because we are promoting the month
                    continue;
                } else if (daysToAdd > 0 || dayShifted) {
-                    cl.set(Calendar.SECOND, 0);
-                    cl.set(Calendar.MINUTE, 0);
-                    cl.set(Calendar.HOUR_OF_DAY, 0);
-                    cl.set(Calendar.DAY_OF_MONTH, day);
-                    cl.set(Calendar.MONTH, mon - 1);
+                    cl.setSecond(0);
+                    cl.setMinute(0);
+                    cl.setHour(0);
+                    cl.setDayOfMonth(day);
+                    cl.setMonth(mon - 1);
                    // '- 1' here because we are NOT promoting the month
                    continue;
                }
            } else {
-                int cDow = cl.get(Calendar.DAY_OF_WEEK); // current d-o-w
+                int cDow = cl.getDayOfWeek(); // current d-o-w
                int dow = daysOfWeek.first(); // desired
                // d-o-w
                st = daysOfWeek.tailSet(cDow);
@@ -591,23 +600,23 @@ public long getNextValidTimeAfter(final long time) {
                    daysToAdd = dow + (7 - cDow);
                }

-                int lDay = getLastDayOfMonth(mon, cl.get(Calendar.YEAR));
+                int lDay = getLastDayOfMonth(mon, cl.getYear());

                if (day + daysToAdd > lDay) { // will we pass the end of
                    // the month?
-                    cl.set(Calendar.SECOND, 0);
-                    cl.set(Calendar.MINUTE, 0);
-                    cl.set(Calendar.HOUR_OF_DAY, 0);
-                    cl.set(Calendar.DAY_OF_MONTH, 1);
-                    cl.set(Calendar.MONTH, mon);
+                    cl.setSecond(0);
+                    cl.setMinute(0);
+                    cl.setHour(0);
+                    cl.setDayOfMonth(1);
+                    cl.setMonth(mon);
                    // no '- 1' here because we are promoting the month
                    continue;
                } else if (daysToAdd > 0) { // are we switching days?
- cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, day + daysToAdd); - cl.set(Calendar.MONTH, mon - 1); + cl.setSecond(0); + cl.setMinute(0); + cl.setHour(0); + cl.setDayOfMonth(day + daysToAdd); + cl.setMonth(mon - 1); // '- 1' because calendar is 0-based for this field, // and we are 1-based continue; @@ -618,12 +627,12 @@ public long getNextValidTimeAfter(final long time) { // throw new UnsupportedOperationException( // "Support for specifying both a day-of-week AND a day-of-month parameter is not implemented."); } - cl.set(Calendar.DAY_OF_MONTH, day); + cl.setDayOfMonth(day); - mon = cl.get(Calendar.MONTH) + 1; + mon = cl.getMonth() + 1; // '+ 1' because calendar is 0-based for this field, and we are // 1-based - int year = cl.get(Calendar.YEAR); + int year = cl.getYear(); t = -1; // test for expressions that never generate a valid fire date, @@ -643,21 +652,21 @@ public long getNextValidTimeAfter(final long time) { year++; } if (mon != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, mon - 1); + cl.setSecond(0); + cl.setMinute(0); + cl.setHour(0); + cl.setDayOfMonth(1); + cl.setMonth(mon - 1); // '- 1' because calendar is 0-based for this field, and we are // 1-based - cl.set(Calendar.YEAR, year); + cl.setYear(year); continue; } - cl.set(Calendar.MONTH, mon - 1); + cl.setMonth(mon - 1); // '- 1' because calendar is 0-based for this field, and we are // 1-based - year = cl.get(Calendar.YEAR); + year = cl.getYear(); t = -1; // get year................................................... @@ -671,22 +680,24 @@ public long getNextValidTimeAfter(final long time) { } if (year != t) { - cl.set(Calendar.SECOND, 0); - cl.set(Calendar.MINUTE, 0); - cl.set(Calendar.HOUR_OF_DAY, 0); - cl.set(Calendar.DAY_OF_MONTH, 1); - cl.set(Calendar.MONTH, 0); + cl.setSecond(0); + cl.setMinute(0); + cl.setHour(0); + cl.setDayOfMonth(1); + cl.setMonth(0); // '- 1' because calendar is 0-based for this field, and we are // 1-based - cl.set(Calendar.YEAR, year); + cl.setYear(year); continue; } - cl.set(Calendar.YEAR, year); + cl.setYear(year); gotOne = true; } // while( done == false ) - return cl.getTimeInMillis(); + LocalDateTime nextRuntime = cl.getLocalDateTime(); + + return nextRuntime.atZone(timeZone).toInstant().toEpochMilli(); } public String expression() { @@ -735,7 +746,7 @@ public String getExpressionSummary() { @Override public int hashCode() { - return Objects.hash(expression); + return Objects.hash(expression, timeZone); } @Override @@ -747,7 +758,7 @@ public boolean equals(Object obj) { return false; } final Cron other = (Cron) obj; - return Objects.equals(this.expression, other.expression); + return Objects.equals(this.expression, other.expression) && Objects.equals(this.timeZone, other.timeZone); } /** @@ -757,7 +768,7 @@ public boolean equals(Object obj) { */ @Override public String toString() { - return expression; + return "Cron{" + "timeZone=" + timeZone + ", expression='" + expression + '\'' + '}'; } /** @@ -1430,7 +1441,7 @@ private static int getLastDayOfMonth(int monthNum, int year) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.value(toString()); + return builder.value(expression); } private static class ValueSet { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/LocalDateTimeLegacyWrapper.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/LocalDateTimeLegacyWrapper.java new file mode 100644 index 0000000000000..e540acc8042eb --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/LocalDateTimeLegacyWrapper.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.scheduler; + +import java.time.LocalDateTime; +import java.time.chrono.ChronoLocalDateTime; + +/** + * This class is designed to wrap the LocalDateTime class in order to make it behave, in terms of mutation, like a legacy Calendar class. + * This is to provide compatibility with the existing Cron next runtime calculation algorithm which relies on certain quirks of the Calendar + * such as days of the week being numbered starting on Sunday==1 and being able to set the current hour to 24 and have it roll over to + * midnight the next day. + */ +public class LocalDateTimeLegacyWrapper { + + private LocalDateTime ldt; + + public LocalDateTimeLegacyWrapper(LocalDateTime ldt) { + this.ldt = ldt; + } + + public int getYear() { + return ldt.getYear(); + } + + public int getDayOfMonth() { + return ldt.getDayOfMonth(); + } + + public int getHour() { + return ldt.getHour(); + } + + public int getMinute() { + return ldt.getMinute(); + } + + public int getSecond() { + return ldt.getSecond(); + } + + public int getDayOfWeek() { + return (ldt.getDayOfWeek().getValue() % 7) + 1; + } + + public int getMonth() { + return ldt.getMonthValue() - 1; + } + + public void setYear(int year) { + ldt = ldt.withYear(year); + } + + public void setDayOfMonth(int dayOfMonth) { + var lengthOfMonth = ldt.getMonth().length(ldt.toLocalDate().isLeapYear()); + if (dayOfMonth <= lengthOfMonth) { + ldt = ldt.withDayOfMonth(dayOfMonth); + } else { + var months = dayOfMonth / lengthOfMonth; + var day = dayOfMonth % lengthOfMonth; + ldt = ldt.plusMonths(months).withDayOfMonth(day); + } + } + + public void setMonth(int month) { + month++; // Months are 0-based in Calendar + if (month <= 12) { + ldt = ldt.withMonth(month); + } else { + var years = month / 12; + var monthOfYear = month % 12; + ldt = ldt.plusYears(years).withMonth(monthOfYear); + } + } + + public void setHour(int hour) { + if (hour < 24) { + ldt = ldt.withHour(hour); + } else { + var days = hour / 24; + var hourOfDay = hour % 24; + ldt = ldt.plusDays(days).withHour(hourOfDay); + } + } + + public void setMinute(int minute) { + if (minute < 60) { + ldt = ldt.withMinute(minute); + } else { + var hours = minute / 60; + var minuteOfHour = minute % 60; + ldt = ldt.plusHours(hours).withMinute(minuteOfHour); + } + } + + public void setSecond(int second) { + if (second < 60) { + ldt = ldt.withSecond(second); + } else { + var minutes = second / 60; + var secondOfMinute = second % 60; + ldt = ldt.plusMinutes(minutes).withSecond(secondOfMinute); + } + } + + public void plusYears(long years) { + ldt = ldt.plusYears(years); + } + + public void plusSeconds(long seconds) { + ldt = ldt.plusSeconds(seconds); + } + + public boolean isAfter(ChronoLocalDateTime other) { + return ldt.isAfter(other); + } + + public boolean isBefore(ChronoLocalDateTime other) { + return ldt.isBefore(other); + } + + public LocalDateTime getLocalDateTime() { + return ldt; + } +} diff --git 
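To make the Calendar-style behaviour being emulated concrete, here is a minimal usage sketch of the wrapper above (illustrative only, not code from this patch; it relies only on the wrapper methods shown):

    import java.time.LocalDateTime;

    import org.elasticsearch.xpack.core.scheduler.LocalDateTimeLegacyWrapper;

    public class WrapperRolloverSketch {
        public static void main(String[] args) {
            // Setting hour 24 rolls over to midnight of the next day,
            // mirroring Calendar.set(Calendar.HOUR_OF_DAY, 24).
            LocalDateTimeLegacyWrapper w = new LocalDateTimeLegacyWrapper(LocalDateTime.of(2024, 10, 26, 23, 0));
            w.setHour(24);
            assert w.getHour() == 0 && w.getDayOfMonth() == 27;
            // 2024-10-27 is a Sunday; like Calendar, days of the week are numbered Sunday == 1.
            assert w.getDayOfWeek() == 1;
            // Like Calendar.MONTH, getMonth() is 0-based, so October reports 9.
            assert w.getMonth() == 9;
        }
    }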
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/CronTimezoneTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/CronTimezoneTests.java new file mode 100644 index 0000000000000..1e469002457d8 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/CronTimezoneTests.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.scheduler; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; +import java.time.zone.ZoneOffsetTransition; +import java.time.zone.ZoneRules; + +import static java.time.Instant.ofEpochMilli; +import static java.util.TimeZone.getTimeZone; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; + +public class CronTimezoneTests extends ESTestCase { + + public void testForFixedOffsetCorrectlyCalculateNextRuntime() { + Cron cron = new Cron("0 0 2 * * ?", ZoneOffset.of("+1")); + long midnightUTC = Instant.parse("2020-01-01T00:00:00Z").toEpochMilli(); + long nextValidTimeAfter = cron.getNextValidTimeAfter(midnightUTC); + assertThat(Instant.ofEpochMilli(nextValidTimeAfter), equalTo(Instant.parse("2020-01-01T01:00:00Z"))); + } + + public void testForLondonFixedDSTTransitionCheckCorrectSchedule() { + ZoneId londonZone = getTimeZone("Europe/London").toZoneId(); + + Cron cron = new Cron("0 0 2 * * ?", londonZone); + ZoneRules londonZoneRules = londonZone.getRules(); + Instant springMidnight = Instant.parse("2020-03-01T00:00:00Z"); + long timeBeforeDST = springMidnight.toEpochMilli(); + + assertThat(cron.getNextValidTimeAfter(timeBeforeDST), equalTo(Instant.parse("2020-03-01T02:00:00Z").toEpochMilli())); + + ZoneOffsetTransition zoneOffsetTransition = londonZoneRules.nextTransition(springMidnight); + + Instant timeAfterDST = zoneOffsetTransition.getDateTimeBefore() + .plusDays(1) + .atZone(ZoneOffset.UTC) + .withHour(0) + .withMinute(0) + .toInstant(); + + assertThat(cron.getNextValidTimeAfter(timeAfterDST.toEpochMilli()), equalTo(Instant.parse("2020-03-30T01:00:00Z").toEpochMilli())); + } + + public void testRandomDSTTransitionCalculateNextTimeCorrectlyRelativeToUTC() { + ZoneId timeZone = generateRandomDSTZone(); + + logger.info("Testing for timezone {}", timeZone); + + ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().nextTransition(Instant.now()); + + ZonedDateTime midnightBefore = zoneOffsetTransition.getDateTimeBefore().atZone(timeZone).minusDays(2).withHour(0).withMinute(0); + ZonedDateTime midnightAfter = zoneOffsetTransition.getDateTimeAfter().atZone(timeZone).plusDays(2).withHour(0).withMinute(0); + + long epochBefore = midnightBefore.toInstant().toEpochMilli(); + long epochAfter = midnightAfter.toInstant().toEpochMilli(); + + Cron cron = new Cron("0 0 2 * * ?", timeZone); + + long nextScheduleBefore = cron.getNextValidTimeAfter(epochBefore); + long nextScheduleAfter = cron.getNextValidTimeAfter(epochAfter); + + assertThat(nextScheduleBefore - epochBefore, equalTo(2 * 60 * 60 * 1000L)); // 2 hours + assertThat(nextScheduleAfter - epochAfter, equalTo(2 * 60 * 
60 * 1000L)); // 2 hours + + ZonedDateTime utcMidnightBefore = zoneOffsetTransition.getDateTimeBefore() + .atZone(ZoneOffset.UTC) + .minusDays(2) + .withHour(0) + .withMinute(0); + + ZonedDateTime utcMidnightAfter = zoneOffsetTransition.getDateTimeAfter() + .atZone(ZoneOffset.UTC) + .plusDays(2) + .withHour(0) + .withMinute(0); + + long utcEpochBefore = utcMidnightBefore.toInstant().toEpochMilli(); + long utcEpochAfter = utcMidnightAfter.toInstant().toEpochMilli(); + + long nextUtcScheduleBefore = cron.getNextValidTimeAfter(utcEpochBefore); + long nextUtcScheduleAfter = cron.getNextValidTimeAfter(utcEpochAfter); + + assertThat(nextUtcScheduleBefore - utcEpochBefore, not(equalTo(nextUtcScheduleAfter - utcEpochAfter))); + + } + + private ZoneId generateRandomDSTZone() { + ZoneId timeZone; + int i = 0; + boolean found; + do { + timeZone = randomZone(); + found = getTimeZone(timeZone).useDaylightTime(); + i++; + } while (found == false && i <= 500); // Infinite loop prevention + + if (found == false) { + fail("Could not find a timezone with DST"); + } + + logger.debug("Testing for timezone {} after {} iterations", timeZone, i); + return timeZone; + } + + public void testForGMTGapTransitionTriggerTimeIsAsIfTransitionHasntHappenedYet() { + ZoneId london = ZoneId.of("Europe/London"); + Cron cron = new Cron("0 30 1 * * ?", london); // Every day at 1:30 + + Instant beforeTransition = Instant.parse("2025-03-30T00:00:00Z"); + long beforeTransitionEpoch = beforeTransition.toEpochMilli(); + + long nextValidTimeAfter = cron.getNextValidTimeAfter(beforeTransitionEpoch); + assertThat(ofEpochMilli(nextValidTimeAfter), equalTo(Instant.parse("2025-03-30T01:30:00Z"))); + } + + public void testForGMTOverlapTransitionTriggerSkipSecondExecution() { + ZoneId london = ZoneId.of("Europe/London"); + Cron cron = new Cron("0 30 1 * * ?", london); // Every day at 01:30 + + Instant beforeTransition = Instant.parse("2024-10-27T00:00:00Z"); + long beforeTransitionEpoch = beforeTransition.toEpochMilli(); + + long firstValidTimeAfter = cron.getNextValidTimeAfter(beforeTransitionEpoch); + assertThat(ofEpochMilli(firstValidTimeAfter), equalTo(Instant.parse("2024-10-27T00:30:00Z"))); + + long nextValidTimeAfter = cron.getNextValidTimeAfter(firstValidTimeAfter); + assertThat(ofEpochMilli(nextValidTimeAfter), equalTo(Instant.parse("2024-10-28T01:30:00Z"))); + } + + // This test checks that once per minute crons will be unaffected by a DST transition + public void testDiscontinuityResolutionForNonHourCronInRandomTimezone() { + var timezone = generateRandomDSTZone(); + + var cron = new Cron("0 * * * * ?", timezone); // Once per minute + + Instant referenceTime = randomInstantBetween(Instant.now(), Instant.now().plus(1826, ChronoUnit.DAYS)); // ~5 years + ZoneOffsetTransition transition1 = timezone.getRules().nextTransition(referenceTime); + + // Currently there are no known timezones with DST transitions shorter than 10 minutes but this guards against future changes + if (Math.abs(transition1.getOffsetBefore().getTotalSeconds() - transition1.getOffsetAfter().getTotalSeconds()) < 600) { + fail("Transition is not long enough to test"); + } + + testNonHourCronTransition(transition1, cron); + + var transition2 = timezone.getRules().nextTransition(transition1.getInstant().plus(1, ChronoUnit.DAYS)); + + testNonHourCronTransition(transition2, cron); + + } + + private static void testNonHourCronTransition(ZoneOffsetTransition transition, Cron cron) { + Instant insideTransition; + if (transition.isGap()) { + insideTransition = 
transition.getInstant().plus(10, ChronoUnit.MINUTES); + Instant nextTrigger = ofEpochMilli(cron.getNextValidTimeAfter(insideTransition.toEpochMilli())); + assertThat(nextTrigger, equalTo(insideTransition.plus(1, ChronoUnit.MINUTES))); + } else { + insideTransition = transition.getInstant().minus(10, ChronoUnit.MINUTES); + Instant nextTrigger = ofEpochMilli(cron.getNextValidTimeAfter(insideTransition.toEpochMilli())); + assertThat(nextTrigger, equalTo(insideTransition.plus(1, ChronoUnit.MINUTES))); + + insideTransition = insideTransition.plus(transition.getDuration()); + nextTrigger = ofEpochMilli(cron.getNextValidTimeAfter(insideTransition.toEpochMilli())); + assertThat(nextTrigger, equalTo(insideTransition.plus(1, ChronoUnit.MINUTES))); + } + } + + // This test checks that once per day crons will behave correctly during a DST transition + public void testDiscontinuityResolutionForCronInRandomTimezone() { + var timezone = generateRandomDSTZone(); + + Instant referenceTime = randomInstantBetween(Instant.now(), Instant.now().plus(1826, ChronoUnit.DAYS)); // ~5 years + ZoneOffsetTransition transition1 = timezone.getRules().nextTransition(referenceTime); + + // Currently there are no known timezones with DST transitions shorter than 10 minutes but this guards against future changes + if (Math.abs(transition1.getOffsetBefore().getTotalSeconds() - transition1.getOffsetAfter().getTotalSeconds()) < 600) { + fail("Transition is not long enough to test"); + } + + testHourCronTransition(transition1, timezone); + + var transition2 = timezone.getRules().nextTransition(transition1.getInstant().plus(1, ChronoUnit.DAYS)); + + testHourCronTransition(transition2, timezone); + } + + private static void testHourCronTransition(ZoneOffsetTransition transition, ZoneId timezone) { + if (transition.isGap()) { + LocalDateTime targetTime = transition.getDateTimeBefore().plusMinutes(10); + + var cron = new Cron("0 " + targetTime.getMinute() + " " + targetTime.getHour() + " * * ?", timezone); + + long nextTrigger = cron.getNextValidTimeAfter(transition.getInstant().minus(10, ChronoUnit.MINUTES).toEpochMilli()); + + assertThat(ofEpochMilli(nextTrigger), equalTo(transition.getInstant().plus(10, ChronoUnit.MINUTES))); + } else { + LocalDateTime targetTime = transition.getDateTimeAfter().plusMinutes(10); + var cron = new Cron("0 " + targetTime.getMinute() + " " + targetTime.getHour() + " * * ?", timezone); + + long transitionLength = Math.abs(transition.getDuration().toSeconds()); + long firstTrigger = cron.getNextValidTimeAfter( + transition.getInstant().minusSeconds(transitionLength).minus(10, ChronoUnit.MINUTES).toEpochMilli() + ); + + assertThat( + ofEpochMilli(firstTrigger), + equalTo(transition.getInstant().minusSeconds(transitionLength).plus(10, ChronoUnit.MINUTES)) + ); + + var repeatTrigger = cron.getNextValidTimeAfter(firstTrigger + (1000 * 60L)); // 1 minute + + assertThat(repeatTrigger - firstTrigger, Matchers.greaterThan(24 * 60 * 60 * 1000L)); // 24 hours + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java index 7badf6926c574..387224408a14b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java @@ -140,7 +140,7 @@ 
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { out.writeWriteable(configurationOverrides); out.writeBoolean(enabled); - out.writeString(interval.toString()); + out.writeString(interval.expression()); out.writeOptionalInstant(lastSynced); out.writeString(name); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java index 3c08a5ac1e218..008cbca0cd5ea 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorScheduling.java @@ -222,7 +222,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(enabled); - out.writeString(interval.toString()); + out.writeString(interval.expression()); } @Override diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecyclePolicyTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecyclePolicyTests.java index b7674a2d60bff..0ab3e99e1efc9 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecyclePolicyTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecyclePolicyTests.java @@ -64,12 +64,12 @@ public void testNextExecutionTimeSchedule() { SnapshotLifecyclePolicy p = new SnapshotLifecyclePolicy( "id", "name", - "0 1 2 3 4 ? 2099", + "0 1 2 3 4 ? 2049", "repo", Collections.emptyMap(), SnapshotRetentionConfiguration.EMPTY ); - assertThat(p.calculateNextExecution(-1, Clock.systemUTC()), equalTo(4078864860000L)); + assertThat(p.calculateNextExecution(-1, Clock.systemUTC()), equalTo(2501028060000L)); } public void testNextExecutionTimeInterval() { diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/11_timezoned_schedules.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/11_timezoned_schedules.yml new file mode 100644 index 0000000000000..0371443367603 --- /dev/null +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/11_timezoned_schedules.yml @@ -0,0 +1,121 @@ +--- +setup: + - do: + cluster.health: + wait_for_status: yellow + +--- +"Test put watch api with timezone": + - do: + watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "timezone": "America/Los_Angeles", + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + - match: { _id: "my_watch" } + - do: + watcher.get_watch: + id: "my_watch" + - match: { watch.trigger.schedule.timezone: "America/Los_Angeles" } + +--- +"Test put watch api without timezone": + - do: + watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + - match: 
{ _id: "my_watch" } + - do: + watcher.get_watch: + id: "my_watch" + - is_false: watch.trigger.schedule.timezone + +--- +"Reject put watch with invalid timezone": + - do: + watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "timezone": "Pangea/Tethys", + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + "always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test" + } + } + } + } + catch: bad_request + - match: { error.type: "parse_exception" } + - match: { error.reason: "could not parse schedule. invalid timezone [Pangea/Tethys]" } + - match: { error.caused_by.type: "zone_rules_exception" } + - match: { error.caused_by.reason: "Unknown time-zone ID: Pangea/Tethys" } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java index 63e9dae88de41..0db99af9b3fc2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java @@ -8,6 +8,7 @@ import org.elasticsearch.xpack.core.scheduler.Cron; +import java.time.ZoneId; import java.util.Arrays; import java.util.Comparator; import java.util.Objects; @@ -17,6 +18,7 @@ public abstract class CronnableSchedule implements Schedule { private static final Comparator CRON_COMPARATOR = Comparator.comparing(Cron::expression); protected final Cron[] crons; + private ZoneId timeZone; CronnableSchedule(String... expressions) { this(crons(expressions)); @@ -28,6 +30,17 @@ private CronnableSchedule(Cron... crons) { Arrays.sort(crons, CRON_COMPARATOR); } + protected void setTimeZone(ZoneId timeZone) { + this.timeZone = timeZone; + for (Cron cron : crons) { + cron.setTimeZone(timeZone); + } + } + + public ZoneId getTimeZone() { + return timeZone; + } + @Override public long nextScheduledTimeAfter(long startTime, long time) { assert time >= startTime; @@ -45,21 +58,22 @@ public Cron[] crons() { return crons; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CronnableSchedule that = (CronnableSchedule) o; + return Objects.deepEquals(crons, that.crons) && Objects.equals(timeZone, that.timeZone); + } + @Override public int hashCode() { - return Objects.hash((Object[]) crons); + return Objects.hash(Arrays.hashCode(crons), timeZone); } @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - final CronnableSchedule other = (CronnableSchedule) obj; - return Objects.deepEquals(this.crons, other.crons); + public String toString() { + return "CronnableSchedule{" + "crons=" + Arrays.toString(crons) + ", timeZone=" + timeZone + '}'; } static Cron[] crons(String... 
expressions) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistry.java index 31cf46f8abaac..5d2259db71f77 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistry.java @@ -8,8 +8,11 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.watcher.trigger.schedule.support.TimezoneUtils; import java.io.IOException; +import java.time.DateTimeException; +import java.time.ZoneId; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -29,9 +32,15 @@ public Schedule parse(String context, XContentParser parser) throws IOException String type = null; XContentParser.Token token; Schedule schedule = null; + ZoneId timeZone = null; // Default to UTC while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { - type = parser.currentName(); + var fieldName = parser.currentName(); + if (fieldName.equals(ScheduleTrigger.TIMEZONE_FIELD)) { + timeZone = parseTimezone(parser); + } else { + type = parser.currentName(); + } } else if (type != null) { schedule = parse(context, type, parser); } else { @@ -44,9 +53,38 @@ public Schedule parse(String context, XContentParser parser) throws IOException if (schedule == null) { throw new ElasticsearchParseException("could not parse schedule. expected a schedule type field, but no fields were found"); } + + if (timeZone != null && schedule instanceof CronnableSchedule cronnableSchedule) { + cronnableSchedule.setTimeZone(timeZone); + } else if (timeZone != null) { + throw new ElasticsearchParseException( + "could not parse schedule. Timezone is not supported for schedule type [{}]", + schedule.type() + ); + } + return schedule; } + private static ZoneId parseTimezone(XContentParser parser) throws IOException { + ZoneId timeZone; + XContentParser.Token token = parser.nextToken(); + if (token == XContentParser.Token.VALUE_STRING) { + String text = parser.text(); + try { + timeZone = TimezoneUtils.parse(text); + } catch (DateTimeException e) { + throw new ElasticsearchParseException("could not parse schedule. invalid timezone [{}]", e, text); + } + } else { + throw new ElasticsearchParseException( + "could not parse schedule. 
expected a string value for timezone, but found [{}] instead", + token + ); + } + return timeZone; + } + public Schedule parse(String context, String type, XContentParser parser) throws IOException { Schedule.Parser scheduleParser = parsers.get(type); if (scheduleParser == null) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTrigger.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTrigger.java index 4a67841e6c88e..cc6ec8f5aaa57 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTrigger.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTrigger.java @@ -14,6 +14,7 @@ public class ScheduleTrigger implements Trigger { public static final String TYPE = "schedule"; + public static final String TIMEZONE_FIELD = "timezone"; private final Schedule schedule; @@ -49,7 +50,13 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field(schedule.type(), schedule, params).endObject(); + builder.startObject(); + if (schedule instanceof CronnableSchedule cronnableSchedule && cronnableSchedule.getTimeZone() != null) { + builder.field(TIMEZONE_FIELD, cronnableSchedule.getTimeZone().getId()); + } + + builder.field(schedule.type(), schedule, params); + return builder.endObject(); } public static Builder builder(Schedule schedule) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtils.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtils.java new file mode 100644 index 0000000000000..c77fdda803bec --- /dev/null +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtils.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.watcher.trigger.schedule.support; + +import java.time.DateTimeException; +import java.time.ZoneId; +import java.util.Locale; +import java.util.Map; + +import static java.util.stream.Collectors.toMap; + +/** + * Utility class for dealing with Timezone related operations. + */ +public class TimezoneUtils { + + private static final Map caseInsensitiveTZLookup; + + static { + caseInsensitiveTZLookup = ZoneId.getAvailableZoneIds() + .stream() + .collect(toMap(zoneId -> zoneId.toLowerCase(Locale.ROOT), ZoneId::of)); + } + + /** + * Parses a timezone string into a {@link ZoneId} object. The timezone string can be a valid timezone ID, or a + * timezone offset string and is case-insensitive. 
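+ * For example (illustrative): {@code parse("europe/london")} and {@code parse("gmt+01:00")}
+ * both resolve, while {@code ZoneId.of} alone rejects these lower-case forms.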
+ * + * @param timezoneString The timezone string to parse + * @return The parsed {@link ZoneId} object + * @throws DateTimeException If the timezone string is not a valid timezone ID or offset + */ + public static ZoneId parse(String timezoneString) throws DateTimeException { + try { + return ZoneId.of(timezoneString); + } catch (DateTimeException e) { + ZoneId timeZone = caseInsensitiveTZLookup.get(timezoneString.toLowerCase(Locale.ROOT)); + if (timeZone != null) { + return timeZone; + } + try { + return ZoneId.of(timezoneString.toUpperCase(Locale.ROOT)); + } catch (DateTimeException ignored) { + // ignore + } + throw e; + } + } + +} diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistryTests.java index 7fc4739c342f1..aa39701d207c3 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleRegistryTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.Before; +import java.time.ZoneId; import java.util.HashSet; import java.util.Set; @@ -49,15 +50,23 @@ public void testParserInterval() throws Exception { } public void testParseCron() throws Exception { - Object cron = randomBoolean() ? Schedules.cron("* 0/5 * * * ?") : Schedules.cron("* 0/2 * * * ?", "* 0/3 * * * ?", "* 0/5 * * * ?"); - XContentBuilder builder = jsonBuilder().startObject().field(CronSchedule.TYPE, cron).endObject(); + var cron = randomBoolean() ? Schedules.cron("* 0/5 * * * ?") : Schedules.cron("* 0/2 * * * ?", "* 0/3 * * * ?", "* 0/5 * * * ?"); + ZoneId timeZone = null; + XContentBuilder builder = jsonBuilder().startObject().field(CronSchedule.TYPE, cron); + if (randomBoolean()) { + timeZone = randomTimeZone().toZoneId(); + cron.setTimeZone(timeZone); + builder.field(ScheduleTrigger.TIMEZONE_FIELD, timeZone.getId()); + } + builder.endObject(); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); - Schedule schedule = registry.parse("ctx", parser); + CronnableSchedule schedule = (CronnableSchedule) registry.parse("ctx", parser); assertThat(schedule, notNullValue()); assertThat(schedule, instanceOf(CronSchedule.class)); assertThat(schedule, is(cron)); + assertThat(schedule.getTimeZone(), equalTo(timeZone)); } public void testParseHourly() throws Exception { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtilsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtilsTests.java new file mode 100644 index 0000000000000..aa797ec610eca --- /dev/null +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/support/TimezoneUtilsTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.watcher.trigger.schedule.support; + +import org.elasticsearch.test.ESTestCase; + +import java.time.ZoneId; +import java.util.Locale; + +import static org.hamcrest.Matchers.equalTo; + +public class TimezoneUtilsTests extends ESTestCase { + + public void testExpectedFormatParsing() { + assertThat(TimezoneUtils.parse("Europe/London").getId(), equalTo("Europe/London")); + assertThat(TimezoneUtils.parse("+1").getId(), equalTo("+01:00")); + assertThat(TimezoneUtils.parse("GMT+01:00").getId(), equalTo("GMT+01:00")); + } + + public void testParsingIsCaseInsensitive() { + ZoneId timeZone = randomTimeZone().toZoneId(); + assertThat(TimezoneUtils.parse(timeZone.getId()), equalTo(timeZone)); + assertThat(TimezoneUtils.parse(timeZone.getId().toLowerCase(Locale.ROOT)), equalTo(timeZone)); + assertThat(TimezoneUtils.parse(timeZone.getId().toUpperCase(Locale.ROOT)), equalTo(timeZone)); + } + + public void testParsingOffsets() { + ZoneId timeZone = ZoneId.of("GMT+01:00"); + assertThat(TimezoneUtils.parse("GMT+01:00"), equalTo(timeZone)); + assertThat(TimezoneUtils.parse("gmt+01:00"), equalTo(timeZone)); + assertThat(TimezoneUtils.parse("GMT+1"), equalTo(timeZone)); + + assertThat(TimezoneUtils.parse("+1"), equalTo(ZoneId.of("+01:00"))); + } +} From 97296598c3e8df2af53c2d49d8de97d16ef1dc56 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 25 Nov 2024 11:28:15 +0100 Subject: [PATCH 213/386] [Build] Tweak BWC tasks caching (#117423) * do not track certain env vars for LoggedExec * Fix some more tasks on build cacheability * Some more cleanup on task inputs * Mark more tasks as cacheable --- .../conventions/precommit/PomValidationTask.java | 3 +++ .../gradle/internal/BwcSetupExtension.java | 6 +++--- .../InternalDistributionBwcSetupPlugin.java | 3 ++- .../precommit/CheckstylePrecommitPlugin.java | 12 +++++++++--- .../internal/precommit/FilePermissionsTask.java | 3 +++ .../internal/test/rest/CopyRestTestsTask.java | 3 +++ .../java/org/elasticsearch/gradle/LoggedExec.java | 15 +++++++++++++-- .../plugin/GeneratePluginPropertiesTask.java | 5 +++++ 8 files changed, 41 insertions(+), 9 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationTask.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationTask.java index 9d06e632ec928..89bab313a0069 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationTask.java @@ -16,6 +16,8 @@ import org.gradle.api.file.RegularFileProperty; import org.gradle.api.model.ObjectFactory; import org.gradle.api.tasks.InputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.TaskAction; import java.io.FileReader; @@ -37,6 +39,7 @@ public PomValidationTask(ObjectFactory objects) { } @InputFile + @PathSensitive(PathSensitivity.RELATIVE) public RegularFileProperty getPomFile() { return pomFile; } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index d7bf839817e12..5992a40275b46 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ 
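Background for the cacheability annotations introduced throughout this commit (a minimal sketch with a hypothetical task, not code from this patch): @CacheableTask opts a task into the Gradle build cache, and @PathSensitive(PathSensitivity.RELATIVE) makes the cache key depend on an input file's relative path and content rather than its absolute location, so cache hits survive different checkout directories across CI agents.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;

    import org.gradle.api.DefaultTask;
    import org.gradle.api.file.RegularFileProperty;
    import org.gradle.api.tasks.CacheableTask;
    import org.gradle.api.tasks.InputFile;
    import org.gradle.api.tasks.OutputFile;
    import org.gradle.api.tasks.PathSensitive;
    import org.gradle.api.tasks.PathSensitivity;
    import org.gradle.api.tasks.TaskAction;

    @CacheableTask
    public abstract class ExampleCacheableTask extends DefaultTask {

        // Only the relative path and content of this file feed the build-cache key,
        // so a different absolute workspace location does not invalidate cached results.
        @InputFile
        @PathSensitive(PathSensitivity.RELATIVE)
        public abstract RegularFileProperty getConfigFile();

        @OutputFile
        public abstract RegularFileProperty getOutputFile();

        @TaskAction
        void copyConfig() throws IOException {
            Files.copy(
                getConfigFile().get().getAsFile().toPath(),
                getOutputFile().get().getAsFile().toPath(),
                StandardCopyOption.REPLACE_EXISTING
            );
        }
    }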
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -115,9 +115,9 @@ private static TaskProvider createRunBwcGradleTask( if (OS.current() == OS.WINDOWS) { loggedExec.getExecutable().set("cmd"); - loggedExec.args("/C", "call", new File(checkoutDir.get(), "gradlew").toString()); + loggedExec.args("/C", "call", "gradlew"); } else { - loggedExec.getExecutable().set(new File(checkoutDir.get(), "gradlew").toString()); + loggedExec.getExecutable().set("./gradlew"); } if (useUniqueUserHome) { @@ -177,7 +177,7 @@ private static String readFromFile(File file) { } } - public static abstract class JavaHomeValueSource implements ValueSource { + public abstract static class JavaHomeValueSource implements ValueSource { private String minimumCompilerVersionPath(Version bwcVersion) { return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION)) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index 80fd6db59cf9f..c17127f9bbfcf 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -23,6 +23,7 @@ import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.TaskProvider; import org.gradle.jvm.toolchain.JavaToolchainService; import org.gradle.language.base.plugins.LifecycleBasePlugin; @@ -322,7 +323,7 @@ static void createBuildBwcTask( File expectedOutputFile = useNativeExpanded ? 
new File(projectArtifact.expandedDistDir, "elasticsearch-" + bwcVersion.get() + "-SNAPSHOT") : projectArtifact.distFile; - c.getInputs().file(new File(project.getBuildDir(), "refspec")); + c.getInputs().file(new File(project.getBuildDir(), "refspec")).withPathSensitivity(PathSensitivity.RELATIVE); if (useNativeExpanded) { c.getOutputs().dir(expectedOutputFile); } else { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckstylePrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckstylePrecommitPlugin.java index 81ff081ffa82b..dbbe35905d208 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckstylePrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckstylePrecommitPlugin.java @@ -19,6 +19,7 @@ import org.gradle.api.plugins.quality.Checkstyle; import org.gradle.api.plugins.quality.CheckstyleExtension; import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskProvider; @@ -42,18 +43,23 @@ public TaskProvider createTask(Project project) { File checkstyleSuppressions = new File(checkstyleDir, "checkstyle_suppressions.xml"); File checkstyleConf = new File(checkstyleDir, "checkstyle.xml"); TaskProvider copyCheckstyleConf = project.getTasks().register("copyCheckstyleConf"); - // configure inputs and outputs so up to date works properly copyCheckstyleConf.configure(t -> t.getOutputs().files(checkstyleSuppressions, checkstyleConf)); if ("jar".equals(checkstyleConfUrl.getProtocol())) { try { JarURLConnection jarURLConnection = (JarURLConnection) checkstyleConfUrl.openConnection(); - copyCheckstyleConf.configure(t -> t.getInputs().file(jarURLConnection.getJarFileURL())); + copyCheckstyleConf.configure( + t -> t.getInputs().file(jarURLConnection.getJarFileURL()).withPathSensitivity(PathSensitivity.RELATIVE) + ); } catch (IOException e) { throw new UncheckedIOException(e); } } else if ("file".equals(checkstyleConfUrl.getProtocol())) { - copyCheckstyleConf.configure(t -> t.getInputs().files(checkstyleConfUrl.getFile(), checkstyleSuppressionsUrl.getFile())); + copyCheckstyleConf.configure( + t -> t.getInputs() + .files(checkstyleConfUrl.getFile(), checkstyleSuppressionsUrl.getFile()) + .withPathSensitivity(PathSensitivity.RELATIVE) + ); } // Explicitly using an Action interface as java lambdas diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java index a198034c3c09b..479b6f431b867 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java @@ -19,6 +19,8 @@ import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.SkipWhenEmpty; import org.gradle.api.tasks.StopExecutionException; import org.gradle.api.tasks.TaskAction; @@ -79,6 +81,7 @@ private static boolean isExecutableFile(File file) { @InputFiles @IgnoreEmptyDirectories @SkipWhenEmpty + @PathSensitive(PathSensitivity.RELATIVE) public 
FileCollection getFiles() { return getSources().get() .stream() diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java index 02309bb9c1811..6890cfb652952 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java @@ -24,6 +24,8 @@ import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.OutputDirectory; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.SkipWhenEmpty; import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.util.PatternFilterable; @@ -106,6 +108,7 @@ public Map getSubstitutions() { @SkipWhenEmpty @IgnoreEmptyDirectories @InputFiles + @PathSensitive(PathSensitivity.RELATIVE) public FileTree getInputDir() { FileTree coreFileTree = null; FileTree xpackFileTree = null; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java index 505e9a5b114d1..28018b4c50abe 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java @@ -20,6 +20,7 @@ import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; +import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.Optional; @@ -53,6 +54,7 @@ * Exec task implementation. */ @SuppressWarnings("unchecked") +@CacheableTask public abstract class LoggedExec extends DefaultTask implements FileSystemOperationsAware { private static final Logger LOGGER = Logging.getLogger(LoggedExec.class); @@ -87,6 +89,14 @@ public abstract class LoggedExec extends DefaultTask implements FileSystemOperat abstract public Property getCaptureOutput(); @Input + public Provider getWorkingDirPath() { + return getWorkingDir().map(file -> { + String relativeWorkingDir = projectLayout.getProjectDirectory().getAsFile().toPath().relativize(file.toPath()).toString(); + return relativeWorkingDir; + }); + } + + @Internal abstract public Property getWorkingDir(); @Internal @@ -117,9 +127,10 @@ public LoggedExec( * can be reused across different build invocations. 
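 * Variables registered via getEnvironment() are tracked task inputs and therefore feed the
 * build cache key, while variables registered via getNonTrackedEnvironment() (the BUILDKITE
 * and VAULT prefixes below) are still passed to the launched process but are excluded from
 * up-to-date checks and cache-key calculation.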
* */ private void setupDefaultEnvironment(ProviderFactory providerFactory) { - getEnvironment().putAll(providerFactory.environmentVariablesPrefixedBy("BUILDKITE")); getEnvironment().putAll(providerFactory.environmentVariablesPrefixedBy("GRADLE_BUILD_CACHE")); - getEnvironment().putAll(providerFactory.environmentVariablesPrefixedBy("VAULT")); + + getNonTrackedEnvironment().putAll(providerFactory.environmentVariablesPrefixedBy("BUILDKITE")); + getNonTrackedEnvironment().putAll(providerFactory.environmentVariablesPrefixedBy("VAULT")); Provider javaToolchainHome = providerFactory.environmentVariable("JAVA_TOOLCHAIN_HOME"); if (javaToolchainHome.isPresent()) { getEnvironment().put("JAVA_TOOLCHAIN_HOME", javaToolchainHome); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/GeneratePluginPropertiesTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/GeneratePluginPropertiesTask.java index e144122f97770..6cf01814a45ef 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/GeneratePluginPropertiesTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/GeneratePluginPropertiesTask.java @@ -19,10 +19,13 @@ import org.gradle.api.file.RegularFileProperty; import org.gradle.api.provider.ListProperty; import org.gradle.api.provider.Property; +import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; import org.gradle.api.tasks.TaskAction; import org.objectweb.asm.ClassReader; import org.objectweb.asm.tree.ClassNode; @@ -39,6 +42,7 @@ import javax.inject.Inject; +@CacheableTask public abstract class GeneratePluginPropertiesTask extends DefaultTask { public static final String PROPERTIES_FILENAME = "plugin-descriptor.properties"; @@ -82,6 +86,7 @@ public GeneratePluginPropertiesTask(ProjectLayout projectLayout) { public abstract Property getIsLicensed(); @InputFiles + @PathSensitive(PathSensitivity.RELATIVE) public abstract ConfigurableFileCollection getModuleInfoFile(); @OutputFile From 32aaacbd7b383188f2b5eb64fce69a09d11bfc94 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 25 Nov 2024 12:09:30 +0100 Subject: [PATCH 214/386] LOOKUP JOIN using field-caps for field mapping (#117246) * LOOKUP JOIN using field-caps for field mapping Removes the hard-coded hack for languages_lookup, and instead does a field-caps check for the real join index. 
* Update docs/changelog/117246.yaml * Some code review comments --- docs/changelog/117246.yaml | 5 + .../xpack/esql/CsvTestsDataLoader.java | 9 +- .../resources/languages_lookup-settings.json | 5 + .../xpack/esql/analysis/Analyzer.java | 37 ++--- .../xpack/esql/analysis/AnalyzerContext.java | 14 +- .../xpack/esql/session/EsqlSession.java | 144 ++++++++++++------ 6 files changed, 135 insertions(+), 79 deletions(-) create mode 100644 docs/changelog/117246.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_lookup-settings.json diff --git a/docs/changelog/117246.yaml b/docs/changelog/117246.yaml new file mode 100644 index 0000000000000..29c4464855967 --- /dev/null +++ b/docs/changelog/117246.yaml @@ -0,0 +1,5 @@ +pr: 117246 +summary: LOOKUP JOIN using field-caps for field mapping +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 0d6659ad37a27..ffbac2829ea4a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -56,6 +56,8 @@ public class CsvTestsDataLoader { private static final TestsDataset APPS = new TestsDataset("apps"); private static final TestsDataset APPS_SHORT = APPS.withIndex("apps_short").withTypeMapping(Map.of("id", "short")); private static final TestsDataset LANGUAGES = new TestsDataset("languages"); + private static final TestsDataset LANGUAGES_LOOKUP = LANGUAGES.withIndex("languages_lookup") + .withSetting("languages_lookup-settings.json"); private static final TestsDataset ALERTS = new TestsDataset("alerts"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs"); private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data"); @@ -93,14 +95,13 @@ public class CsvTestsDataLoader { private static final TestsDataset BOOKS = new TestsDataset("books"); private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); - private static final String LOOKUP_INDEX_SUFFIX = "_lookup"; - public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), Map.entry(HOSTS.indexName, HOSTS), Map.entry(APPS.indexName, APPS), Map.entry(APPS_SHORT.indexName, APPS_SHORT), Map.entry(LANGUAGES.indexName, LANGUAGES), + Map.entry(LANGUAGES_LOOKUP.indexName, LANGUAGES_LOOKUP), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), Map.entry(ALERTS.indexName, ALERTS), @@ -130,9 +131,7 @@ public class CsvTestsDataLoader { Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), Map.entry(BOOKS.indexName, BOOKS), - Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT), - // JOIN LOOKUP alias - Map.entry(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX, LANGUAGES.withIndex(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX)) + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_lookup-settings.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_lookup-settings.json new file mode 100644 index 0000000000000..b73d1f9accf92 
--- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages_lookup-settings.json @@ -0,0 +1,5 @@ +{ + "index": { + "mode": "lookup" + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 7ad4c3d3e644d..dde7bc09ac615 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.index.EsIndex; +import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plan.TableIdentifier; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -106,7 +107,6 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -199,11 +199,12 @@ private static class ResolveTable extends ParameterizedAnalyzerRule"), enrichResolution); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 9630a520e8654..25bb6d80d0dd0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.compute.data.Block; @@ -62,6 +63,8 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -76,7 +79,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -272,9 +274,12 @@ public void analyzedPlan(LogicalPlan parsed, EsqlExecutionInfo executionInfo, Ac return; } - preAnalyze(parsed, executionInfo, (indices, policies) -> { + preAnalyze(parsed, executionInfo, (indices, lookupIndices, policies) -> { planningMetrics.gatherPreAnalysisMetrics(parsed); - Analyzer analyzer = new Analyzer(new AnalyzerContext(configuration, functionRegistry, indices, policies), verifier); + Analyzer analyzer = new Analyzer( + new AnalyzerContext(configuration, functionRegistry, indices, lookupIndices, policies), + verifier + ); var plan = analyzer.analyze(parsed); plan.setAnalyzed(); 
LOGGER.debug("Analyzed plan:\n{}", plan); @@ -285,7 +290,7 @@ public void analyzedPlan(LogicalPlan parsed, EsqlExecutionInfo executionInfo, Ac private void preAnalyze( LogicalPlan parsed, EsqlExecutionInfo executionInfo, - BiFunction action, + TriFunction action, ActionListener listener ) { PreAnalyzer.PreAnalysis preAnalysis = preAnalyzer.preAnalyze(parsed); @@ -299,63 +304,81 @@ private void preAnalyze( ).keySet(); enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> { // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API - var matchFields = enrichResolution.resolvedEnrichPolicies() + var enrichMatchFields = enrichResolution.resolvedEnrichPolicies() .stream() .map(ResolvedEnrichPolicy::matchField) .collect(Collectors.toSet()); - Map unavailableClusters = enrichResolution.getUnavailableClusters(); - preAnalyzeIndices(parsed, executionInfo, unavailableClusters, l.delegateFailureAndWrap((ll, indexResolution) -> { - // TODO in follow-PR (for skip_unavailble handling of missing concrete indexes) add some tests for invalid index - // resolution to updateExecutionInfo - if (indexResolution.isValid()) { - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); - if (executionInfo.isCrossClusterSearch() - && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) == 0) { - // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel - // Exception to let the LogicalPlanActionListener decide how to proceed - ll.onFailure(new NoClustersToSearchException()); - return; - } - - Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( - indexResolution.get().concreteIndices().toArray(String[]::new) - ).keySet(); - // If new clusters appear when resolving the main indices, we need to resolve the enrich policies again - // or exclude main concrete indices. Since this is rare, it's simpler to resolve the enrich policies again. 
- // TODO: add a test for this - if (targetClusters.containsAll(newClusters) == false - // do not bother with a re-resolution if only remotes were requested and all were offline - && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) > 0) { - enrichPolicyResolver.resolvePolicies( - newClusters, - unresolvedPolicies, - ll.map(newEnrichResolution -> action.apply(indexResolution, newEnrichResolution)) - ); - return; - } - } - ll.onResponse(action.apply(indexResolution, enrichResolution)); - }), matchFields); + // get the field names from the parsed plan combined with the ENRICH match fields from the ENRICH policy + var fieldNames = fieldNames(parsed, enrichMatchFields); + // First resolve the lookup indices, then the main indices + preAnalyzeLookupIndices( + preAnalysis.lookupIndices, + fieldNames, + l.delegateFailureAndWrap( + (lx, lookupIndexResolution) -> preAnalyzeIndices( + indices, + executionInfo, + enrichResolution.getUnavailableClusters(), + fieldNames, + lx.delegateFailureAndWrap((ll, indexResolution) -> { + // TODO in follow-PR (for skip_unavailble handling of missing concrete indexes) add some tests for invalid + // index resolution to updateExecutionInfo + if (indexResolution.isValid()) { + EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters( + executionInfo, + indexResolution.unavailableClusters() + ); + if (executionInfo.isCrossClusterSearch() + && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) == 0) { + // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel + // Exception to let the LogicalPlanActionListener decide how to proceed + ll.onFailure(new NoClustersToSearchException()); + return; + } + + Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( + indexResolution.get().concreteIndices().toArray(String[]::new) + ).keySet(); + // If new clusters appear when resolving the main indices, we need to resolve the enrich policies again + // or exclude main concrete indices. Since this is rare, it's simpler to resolve the enrich policies + // again. 
+ // TODO: add a test for this + if (targetClusters.containsAll(newClusters) == false + // do not bother with a re-resolution if only remotes were requested and all were offline + && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) > 0) { + enrichPolicyResolver.resolvePolicies( + newClusters, + unresolvedPolicies, + ll.map( + newEnrichResolution -> action.apply(indexResolution, lookupIndexResolution, newEnrichResolution) + ) + ); + return; + } + } + ll.onResponse(action.apply(indexResolution, lookupIndexResolution, enrichResolution)); + }) + ) + ) + ); })); } private void preAnalyzeIndices( - LogicalPlan parsed, + List indices, EsqlExecutionInfo executionInfo, Map unavailableClusters, // known to be unavailable from the enrich policy API call - ActionListener listener, - Set enrichPolicyMatchFields + Set fieldNames, + ActionListener listener ) { - PreAnalyzer.PreAnalysis preAnalysis = new PreAnalyzer().preAnalyze(parsed); // TODO we plan to support joins in the future when possible, but for now we'll just fail early if we see one - if (preAnalysis.indices.size() > 1) { + if (indices.size() > 1) { // Note: JOINs are not supported but we detect them when listener.onFailure(new MappingException("Queries with multiple indices are not supported")); - } else if (preAnalysis.indices.size() == 1) { - TableInfo tableInfo = preAnalysis.indices.get(0); + } else if (indices.size() == 1) { + TableInfo tableInfo = indices.get(0); TableIdentifier table = tableInfo.id(); - var fieldNames = fieldNames(parsed, enrichPolicyMatchFields); Map clusterIndices = indicesExpressionGrouper.groupIndices(IndicesOptions.DEFAULT, table.index()); for (Map.Entry entry : clusterIndices.entrySet()) { @@ -401,6 +424,25 @@ private void preAnalyzeIndices( } } + private void preAnalyzeLookupIndices(List indices, Set fieldNames, ActionListener listener) { + if (indices.size() > 1) { + // Note: JOINs on more than one index are not yet supported + listener.onFailure(new MappingException("More than one LOOKUP JOIN is not supported")); + } else if (indices.size() == 1) { + TableInfo tableInfo = indices.get(0); + TableIdentifier table = tableInfo.id(); + // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types + indexResolver.resolveAsMergedMapping(table.index(), fieldNames, listener); + } else { + try { + // No lookup indices specified + listener.onResponse(IndexResolution.invalid("[none specified]")); + } catch (Exception ex) { + listener.onFailure(ex); + } + } + } + static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchFields) { if (false == parsed.anyMatch(plan -> plan instanceof Aggregate || plan instanceof Project)) { // no explicit columns selection, for example "from employees" @@ -422,6 +464,7 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF // "keep" attributes are special whenever a wildcard is used in their name // ie "from test | eval lang = languages + 1 | keep *l" should consider both "languages" and "*l" as valid fields to ask for AttributeSet keepCommandReferences = new AttributeSet(); + AttributeSet keepJoinReferences = new AttributeSet(); List> keepMatches = new ArrayList<>(); List keepPatterns = new ArrayList<>(); @@ -440,6 +483,11 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF // The exact name of the field will be added later as part of enrichPolicyMatchFields Set enrichRefs.removeIf(attr -> attr instanceof EmptyAttribute); references.addAll(enrichRefs); + } else if (p instanceof LookupJoin join) { + 
keepJoinReferences.addAll(join.config().matchFields()); // TODO: why is this empty
+                if (join.config().type() instanceof JoinTypes.UsingJoinType usingJoinType) {
+                    keepJoinReferences.addAll(usingJoinType.columns());
+                }
             } else {
                 references.addAll(p.references());
                 if (p instanceof UnresolvedRelation ur && ur.indexMode() == IndexMode.TIME_SERIES) {
@@ -473,6 +521,8 @@ static Set<String> fieldNames(LogicalPlan parsed, Set<String> enrichPolicyMatchF
                 references.removeIf(attr -> matchByName(attr, alias.name(), keepCommandReferences.contains(attr)));
             });
         });
+        // Add JOIN ON column references afterward to avoid Alias removal
+        references.addAll(keepJoinReferences);
 
         // remove valid metadata attributes because they will be filtered out by the IndexResolver anyway
         // otherwise, in some edge cases, we will fail to ask for "*" (all fields) instead

From 3896de6639f1807676345d71b70b4cc218f9f37b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?=
Date: Mon, 25 Nov 2024 12:34:20 +0100
Subject: [PATCH 215/386] ESQL: Fix AttributeSet#add() returning the opposite
 expected value (#117367)

Set/Collection#add() is supposed to return `true` if the collection
changed (i.e. if it actually added something). In this case, it must
return whether the old value was null.

Extracted from https://github.com/elastic/elasticsearch/pull/114317
(where it's being used)
---
 .../elasticsearch/xpack/esql/core/expression/AttributeSet.java | 2 +-
 .../org/elasticsearch/xpack/ql/expression/AttributeSet.java    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java
index e3eac60703915..a092e17931237 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/AttributeSet.java
@@ -113,7 +113,7 @@ public <T> T[] toArray(T[] a) {
 
     @Override
     public boolean add(Attribute e) {
-        return delegate.put(e, PRESENT) != null;
+        return delegate.put(e, PRESENT) == null;
     }
 
     @Override
diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeSet.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeSet.java
index 0ee291af29ae1..a44764dab2a38 100644
--- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeSet.java
+++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/AttributeSet.java
@@ -113,7 +113,7 @@ public <T> T[] toArray(T[] a) {
 
     @Override
     public boolean add(Attribute e) {
-        return delegate.put(e, PRESENT) != null;
+        return delegate.put(e, PRESENT) == null;
     }
 
     @Override
From e319875d7e69d241283fc2e762cd6d8424886715 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Mon, 25 Nov 2024 13:27:38 +0100
Subject: [PATCH 216/386] Make InternalComposite.InternalBucket leaner (#117368)

This commit removes reverseMuls and missingOrders from
InternalComposite.InternalBucket.
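
Editor's note: a minimal, self-contained sketch of the refactoring pattern this commit applies (the `Bucket` and `LeanerBucketDemo` names are hypothetical, not the actual Elasticsearch classes). Since `reverseMuls` and `missingOrders` are identical for every bucket of one aggregation, they can live once with the owner and be passed into the key comparison, instead of being stored as transient fields on every bucket:

```java
import java.util.Comparator;
import java.util.List;

public class LeanerBucketDemo {

    // Each bucket now carries only its own data: key and doc count.
    record Bucket(int[] key, long docCount) {
        // The shared comparator state is supplied by the caller at comparison time.
        int compareKey(Bucket other, int[] reverseMuls) {
            for (int i = 0; i < key.length; i++) {
                int cmp = Integer.compare(key[i], other.key[i]) * reverseMuls[i];
                if (cmp != 0) {
                    return cmp;
                }
            }
            return 0;
        }
    }

    public static void main(String[] args) {
        int[] reverseMuls = { 1, -1 }; // second key component sorts descending
        List<Bucket> buckets = List.of(new Bucket(new int[] { 1, 2 }, 10L), new Bucket(new int[] { 1, 3 }, 5L));
        // The single copy of the state is closed over once, not duplicated per bucket.
        Comparator<Bucket> order = (a, b) -> a.compareKey(b, reverseMuls);
        System.out.println(buckets.stream().sorted(order).map(Bucket::docCount).toList()); // [5, 10]
    }
}
```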
--- .../bucket/composite/CompositeAggregator.java | 10 +---- .../bucket/composite/InternalComposite.java | 45 ++++--------------- .../composite/InternalCompositeTests.java | 14 +----- 3 files changed, 11 insertions(+), 58 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 0baecf6e3f92b..441b30f872a35 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -205,15 +205,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw CompositeKey key = queue.toCompositeKey(slot); InternalAggregations aggs = subAggsForBuckets.apply(slot); long docCount = queue.getDocCount(slot); - buckets[(int) queue.size()] = new InternalComposite.InternalBucket( - sourceNames, - formats, - key, - reverseMuls, - missingOrders, - docCount, - aggs - ); + buckets[(int) queue.size()] = new InternalComposite.InternalBucket(sourceNames, formats, key, docCount, aggs); } CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null; return new InternalAggregation[] { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 8b3253418bc23..faa953e77edd8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -19,7 +19,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; @@ -103,7 +102,7 @@ public InternalComposite(StreamInput in) throws IOException { } this.reverseMuls = in.readIntArray(); this.missingOrders = in.readArray(MissingOrder::readFromStream, MissingOrder[]::new); - this.buckets = in.readCollectionAsList((input) -> new InternalBucket(input, sourceNames, formats, reverseMuls, missingOrders)); + this.buckets = in.readCollectionAsList((input) -> new InternalBucket(input, sourceNames, formats)); this.afterKey = in.readOptionalWriteable(CompositeKey::new); this.earlyTerminated = in.readBoolean(); } @@ -155,15 +154,7 @@ public InternalComposite create(List newBuckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket( - prototype.sourceNames, - prototype.formats, - prototype.key, - prototype.reverseMuls, - prototype.missingOrders, - prototype.docCount, - aggregations - ); + return new InternalBucket(prototype.sourceNames, prototype.formats, prototype.key, prototype.docCount, aggregations); } public int getSize() { @@ -206,7 +197,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont private final PriorityQueue> pq = new PriorityQueue<>(size) { @Override protected boolean 
lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { - return a.current().compareKey(b.current()) < 0; + return a.current().compareKey(b.current(), reverseMuls, missingOrders) < 0; } }; private boolean earlyTerminated = false; @@ -227,7 +218,7 @@ public InternalAggregation get() { final List result = new ArrayList<>(); while (pq.size() > 0) { IteratorAndCurrent top = pq.top(); - if (lastBucket != null && top.current().compareKey(lastBucket) != 0) { + if (lastBucket != null && top.current().compareKey(lastBucket, reverseMuls, missingOrders) != 0) { InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); buckets.clear(); result.add(reduceBucket); @@ -306,7 +297,7 @@ private InternalBucket reduceBucket(List buckets, AggregationRed final var reducedFormats = reducer.getProto().formats; final long docCount = reducer.getDocCount(); final InternalAggregations aggs = reducer.getAggregations(); - return new InternalBucket(sourceNames, reducedFormats, reducer.getProto().key, reverseMuls, missingOrders, docCount, aggs); + return new InternalBucket(sourceNames, reducedFormats, reducer.getProto().key, docCount, aggs); } } @@ -329,16 +320,11 @@ public int hashCode() { return Objects.hash(super.hashCode(), size, buckets, afterKey, Arrays.hashCode(reverseMuls), Arrays.hashCode(missingOrders)); } - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket - implements - CompositeAggregation.Bucket, - KeyComparable { + public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements CompositeAggregation.Bucket { private final CompositeKey key; private final long docCount; private final InternalAggregations aggregations; - private final transient int[] reverseMuls; - private final transient MissingOrder[] missingOrders; private final transient List sourceNames; private final transient List formats; @@ -346,32 +332,20 @@ public static class InternalBucket extends InternalMultiBucketAggregation.Intern List sourceNames, List formats, CompositeKey key, - int[] reverseMuls, - MissingOrder[] missingOrders, long docCount, InternalAggregations aggregations ) { this.key = key; this.docCount = docCount; this.aggregations = aggregations; - this.reverseMuls = reverseMuls; - this.missingOrders = missingOrders; this.sourceNames = sourceNames; this.formats = formats; } - InternalBucket( - StreamInput in, - List sourceNames, - List formats, - int[] reverseMuls, - MissingOrder[] missingOrders - ) throws IOException { + InternalBucket(StreamInput in, List sourceNames, List formats) throws IOException { this.key = new CompositeKey(in); this.docCount = in.readVLong(); this.aggregations = InternalAggregations.readFrom(in); - this.reverseMuls = reverseMuls; - this.missingOrders = missingOrders; this.sourceNames = sourceNames; this.formats = formats; } @@ -444,8 +418,7 @@ List getFormats() { return formats; } - @Override - public int compareKey(InternalBucket other) { + int compareKey(InternalBucket other, int[] reverseMuls, MissingOrder[] missingOrders) { for (int i = 0; i < key.size(); i++) { if (key.get(i) == null) { if (other.key.get(i) == null) { @@ -470,8 +443,6 @@ InternalBucket finalizeSampling(SamplingContext samplingContext) { sourceNames, formats, key, - reverseMuls, - missingOrders, samplingContext.scaleUp(docCount), InternalAggregations.finalizeSampling(aggregations, samplingContext) ); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java index 5fb1d0e760afa..7e7ccb1d72e80 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java @@ -143,18 +143,10 @@ protected InternalComposite createTestInstance(String name, Map continue; } keys.add(key); - InternalComposite.InternalBucket bucket = new InternalComposite.InternalBucket( - sourceNames, - formats, - key, - reverseMuls, - missingOrders, - 1L, - aggregations - ); + InternalComposite.InternalBucket bucket = new InternalComposite.InternalBucket(sourceNames, formats, key, 1L, aggregations); buckets.add(bucket); } - Collections.sort(buckets, (o1, o2) -> o1.compareKey(o2)); + Collections.sort(buckets, (o1, o2) -> o1.compareKey(o2, reverseMuls, missingOrders)); CompositeKey lastBucket = buckets.size() > 0 ? buckets.get(buckets.size() - 1).getRawKey() : null; return new InternalComposite( name, @@ -191,8 +183,6 @@ protected InternalComposite mutateInstance(InternalComposite instance) { sourceNames, formats, createCompositeKey(), - reverseMuls, - missingOrders, randomLongBetween(1, 100), InternalAggregations.EMPTY ) From 339e4310814cd545339d5a8380f3d3de3479b461 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 25 Nov 2024 14:07:30 +0100 Subject: [PATCH 217/386] [DOCS] Documents that ELSER is the default service for `semantic_text` (#115769) --- .../mapping/types/semantic-text.asciidoc | 24 +++++++- .../semantic-search-semantic-text.asciidoc | 59 +++---------------- 2 files changed, 31 insertions(+), 52 deletions(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index 684ad7c369e7d..f76a9352c2fe8 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -13,25 +13,45 @@ Long passages are <> to smaller secti The `semantic_text` field type specifies an inference endpoint identifier that will be used to generate embeddings. You can create the inference endpoint by using the <>. This field type and the <> type make it simpler to perform semantic search on your data. +If you don't specify an inference endpoint, the <> is used by default. Using `semantic_text`, you won't need to specify how to generate embeddings for your data, or how to index it. The {infer} endpoint automatically determines the embedding generation, indexing, and query to use. 
+If you use the ELSER service, you can set up `semantic_text` with the following API request: + [source,console] ------------------------------------------------------------ PUT my-index-000001 +{ + "mappings": { + "properties": { + "inference_field": { + "type": "semantic_text" + } + } + } +} +------------------------------------------------------------ + +If you use a service other than ELSER, you must create an {infer} endpoint using the <> and reference it when setting up `semantic_text` as the following example demonstrates: + +[source,console] +------------------------------------------------------------ +PUT my-index-000002 { "mappings": { "properties": { "inference_field": { "type": "semantic_text", - "inference_id": "my-elser-endpoint" + "inference_id": "my-openai-endpoint" <1> } } } } ------------------------------------------------------------ // TEST[skip:Requires inference endpoint] +<1> The `inference_id` of the {infer} endpoint to use to generate embeddings. The recommended way to use semantic_text is by having dedicated {infer} endpoints for ingestion and search. @@ -40,7 +60,7 @@ After creating dedicated {infer} endpoints for both, you can reference them usin [source,console] ------------------------------------------------------------ -PUT my-index-000002 +PUT my-index-000003 { "mappings": { "properties": { diff --git a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc index 60692c19c184a..ba9c81db21384 100644 --- a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc @@ -21,45 +21,9 @@ This tutorial uses the <> for demonstra [[semantic-text-requirements]] ==== Requirements -To use the `semantic_text` field type, you must have an {infer} endpoint deployed in -your cluster using the <>. +This tutorial uses the <> for demonstration, which is created automatically as needed. +To use the `semantic_text` field type with an {infer} service other than ELSER, you must create an inference endpoint using the <>. -[discrete] -[[semantic-text-infer-endpoint]] -==== Create the {infer} endpoint - -Create an inference endpoint by using the <>: - -[source,console] ------------------------------------------------------------- -PUT _inference/sparse_embedding/my-elser-endpoint <1> -{ - "service": "elser", <2> - "service_settings": { - "adaptive_allocations": { <3> - "enabled": true, - "min_number_of_allocations": 3, - "max_number_of_allocations": 10 - }, - "num_threads": 1 - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The task type is `sparse_embedding` in the path as the `elser` service will -be used and ELSER creates sparse vectors. The `inference_id` is -`my-elser-endpoint`. -<2> The `elser` service is used in this example. -<3> This setting enables and configures {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations]. -Adaptive allocations make it possible for ELSER to automatically scale up or down resources based on the current load on the process. - -[NOTE] -==== -You might see a 502 bad gateway error in the response when using the {kib} Console. -This error usually just reflects a timeout, while the model downloads in the background. -You can check the download progress in the {ml-app} UI. -If using the Python client, you can set the `timeout` parameter to a higher value. 
-==== [discrete] [[semantic-text-index-mapping]] @@ -75,8 +39,7 @@ PUT semantic-embeddings "mappings": { "properties": { "content": { <1> - "type": "semantic_text", <2> - "inference_id": "my-elser-endpoint" <3> + "type": "semantic_text" <2> } } } @@ -85,18 +48,14 @@ PUT semantic-embeddings // TEST[skip:TBD] <1> The name of the field to contain the generated embeddings. <2> The field to contain the embeddings is a `semantic_text` field. -<3> The `inference_id` is the inference endpoint you created in the previous step. -It will be used to generate the embeddings based on the input text. -Every time you ingest data into the related `semantic_text` field, this endpoint will be used for creating the vector representation of the text. +Since no `inference_id` is provided, the <> is used by default. +To use a different {infer} service, you must create an {infer} endpoint first using the <> and then specify it in the `semantic_text` field mapping using the `inference_id` parameter. [NOTE] ==== -If you're using web crawlers or connectors to generate indices, you have to -<> for these indices to -include the `semantic_text` field. Once the mapping is updated, you'll need to run -a full web crawl or a full connector sync. This ensures that all existing -documents are reprocessed and updated with the new semantic embeddings, -enabling semantic search on the updated data. +If you're using web crawlers or connectors to generate indices, you have to <> for these indices to include the `semantic_text` field. +Once the mapping is updated, you'll need to run a full web crawl or a full connector sync. +This ensures that all existing documents are reprocessed and updated with the new semantic embeddings, enabling semantic search on the updated data. ==== @@ -288,4 +247,4 @@ query from the `semantic-embedding` index: * If you want to use `semantic_text` in hybrid search, refer to https://colab.research.google.com/github/elastic/elasticsearch-labs/blob/main/notebooks/search/09-semantic-text.ipynb[this notebook] for a step-by-step guide. * For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. -* To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. \ No newline at end of file +* To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. 
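
Editor's note: as an illustration of the documentation change above (not part of the patch), an index whose `semantic_text` field relies on the default ELSER endpoint can be created from Java with the low-level REST client roughly as follows; the host, port and index name are assumptions:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class SemanticTextDefaultElser {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/semantic-embeddings");
            // No "inference_id" is given, so the default ELSER endpoint is used.
            request.setJsonEntity("""
                {
                  "mappings": {
                    "properties": {
                      "content": { "type": "semantic_text" }
                    }
                  }
                }
                """);
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```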
From 86098f8c7f83368a1b6f50ffdef4e9c8eb8583b0 Mon Sep 17 00:00:00 2001
From: Mike Pellegrini
Date: Mon, 25 Nov 2024 08:09:37 -0500
Subject: [PATCH 218/386] Mute default ELSER tests (#117390)

---
 muted-tests.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index d4b77f5269c10..ff19d7d59da75 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -223,6 +223,12 @@ tests:
 - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
   method: testInferDeploysDefaultElser
   issue: https://github.com/elastic/elasticsearch/issues/114913
+- class: org.elasticsearch.xpack.inference.InferenceRestIT
+  method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint}
+  issue: https://github.com/elastic/elasticsearch/issues/117027
+- class: org.elasticsearch.xpack.inference.InferenceRestIT
+  method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint}
+  issue: https://github.com/elastic/elasticsearch/issues/117349
 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
   method: testEveryActionIsEitherOperatorOnlyOrNonOperator
   issue: https://github.com/elastic/elasticsearch/issues/102992

From 105d4f89a6e0f3b663c2fbb99939408e53e9c1c0 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Tue, 26 Nov 2024 00:19:00 +1100
Subject: [PATCH 219/386] Mute org.elasticsearch.xpack.test.rest.XPackRestIT
 test {p0=transform/transforms_reset/Test reset running transform} #117473

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index ff19d7d59da75..da8a093ebe674 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -232,6 +232,9 @@ tests:
 - class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT
   method: testEveryActionIsEitherOperatorOnlyOrNonOperator
   issue: https://github.com/elastic/elasticsearch/issues/102992
+- class: org.elasticsearch.xpack.test.rest.XPackRestIT
+  method: test {p0=transform/transforms_reset/Test reset running transform}
+  issue: https://github.com/elastic/elasticsearch/issues/117473
 
 # Examples:
 #

From ff58d891a168078c99334204081ef95607f8d48f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?=
Date: Mon, 25 Nov 2024 14:22:11 +0100
Subject: [PATCH 220/386] ES|QL kql function.
 (#116764)

---
 .../esql/functions/description/kql.asciidoc       |   5 +
 .../esql/functions/examples/kql.asciidoc          |  13 ++
 .../esql/functions/kibana/definition/kql.json     |  37 ++++
 .../esql/functions/kibana/docs/kql.md             |  14 ++
 .../esql/functions/layout/kql.asciidoc            |  17 ++
 .../esql/functions/parameters/kql.asciidoc        |   6 +
 .../esql/functions/signature/kql.svg              |   1 +
 .../esql/functions/types/kql.asciidoc             |  10 +
 x-pack/plugin/esql/build.gradle                   |   2 +
 .../src/main/resources/kql-function.csv-spec      | 153 +++++++++++++++
 .../src/main/resources/qstr-function.csv-spec     |  10 +-
 .../xpack/esql/plugin/KqlFunctionIT.java          | 144 ++++++++++++++
 .../xpack/esql/action/EsqlCapabilities.java       |   5 +
 .../xpack/esql/analysis/Verifier.java             |  22 ++-
 .../function/EsqlFunctionRegistry.java            |   2 +
 .../function/fulltext/FullTextWritables.java      |  16 +-
 .../expression/function/fulltext/Kql.java         |  73 +++++++
 .../physical/local/PushFiltersToSource.java       |   6 +-
 .../planner/EsqlExpressionTranslators.java        |  10 +
 .../xpack/esql/querydsl/query/KqlQuery.java       |  85 ++++++++
 .../elasticsearch/xpack/esql/CsvTests.java        |   4 +
 .../xpack/esql/analysis/VerifierTests.java        |  89 +++++++++
 .../function/fulltext/KqlTests.java               |  41 ++++
 .../NoneFieldFullTextFunctionTestCase.java        |  62 ++++++
 .../function/fulltext/QueryStringTests.java       |  43 +---
 .../LocalPhysicalPlanOptimizerTests.java          | 184 +++++++++++++++++-
 .../esql/querydsl/query/KqlQueryTests.java        | 139 +++++++++++++
 .../xpack/kql/query/KqlQueryBuilder.java          |  20 +-
 .../rest-api-spec/test/esql/60_usage.yml          |   2 +-
 29 files changed, 1149 insertions(+), 66 deletions(-)
 create mode 100644 docs/reference/esql/functions/description/kql.asciidoc
 create mode 100644 docs/reference/esql/functions/examples/kql.asciidoc
 create mode 100644 docs/reference/esql/functions/kibana/definition/kql.json
 create mode 100644 docs/reference/esql/functions/kibana/docs/kql.md
 create mode 100644 docs/reference/esql/functions/layout/kql.asciidoc
 create mode 100644 docs/reference/esql/functions/parameters/kql.asciidoc
 create mode 100644 docs/reference/esql/functions/signature/kql.svg
 create mode 100644 docs/reference/esql/functions/types/kql.asciidoc
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec
 create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java
 create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java
 create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlTests.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQueryTests.java

diff --git a/docs/reference/esql/functions/description/kql.asciidoc b/docs/reference/esql/functions/description/kql.asciidoc
new file mode 100644
index 0000000000000..e1fe411e6689c
--- /dev/null
+++ b/docs/reference/esql/functions/description/kql.asciidoc
@@ -0,0 +1,5 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Description*
+
+Performs a KQL query. Returns true if the provided KQL query string matches the row.
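
Editor's note: an illustrative sketch (not part of the patch) of what the new ES|QL KQL() function boils down to. Per KqlQuery#asBuilder() later in this patch, the filter is rewritten into the KqlQueryBuilder from the x-pack kql module, roughly as if the query had been issued through the search API; the field and query strings below are assumptions:

```java
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.kql.query.KqlQueryBuilder;

public class KqlBuilderDemo {
    public static void main(String[] args) {
        // WHERE KQL("author: Faulkner") translates to roughly this query builder.
        KqlQueryBuilder kql = new KqlQueryBuilder("author: Faulkner");
        // The options surfaced through KqlQuery's BUILDER_APPLIERS map.
        kql.caseInsensitive(true);
        kql.defaultField("content");
        SearchSourceBuilder source = new SearchSourceBuilder().query(kql);
        System.out.println(source); // the JSON that would be sent to _search
    }
}
```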
diff --git a/docs/reference/esql/functions/examples/kql.asciidoc b/docs/reference/esql/functions/examples/kql.asciidoc
new file mode 100644
index 0000000000000..1f8518aeec394
--- /dev/null
+++ b/docs/reference/esql/functions/examples/kql.asciidoc
@@ -0,0 +1,13 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+*Example*
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/kql-function.csv-spec[tag=kql-with-field]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/kql-function.csv-spec[tag=kql-with-field-result]
+|===
+
diff --git a/docs/reference/esql/functions/kibana/definition/kql.json b/docs/reference/esql/functions/kibana/definition/kql.json
new file mode 100644
index 0000000000000..6960681fbbf0d
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/definition/kql.json
@@ -0,0 +1,37 @@
+{
+  "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.",
+  "type" : "eval",
+  "name" : "kql",
+  "description" : "Performs a KQL query. Returns true if the provided KQL query string matches the row.",
+  "signatures" : [
+    {
+      "params" : [
+        {
+          "name" : "query",
+          "type" : "keyword",
+          "optional" : false,
+          "description" : "Query string in KQL query string format."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    },
+    {
+      "params" : [
+        {
+          "name" : "query",
+          "type" : "text",
+          "optional" : false,
+          "description" : "Query string in KQL query string format."
+        }
+      ],
+      "variadic" : false,
+      "returnType" : "boolean"
+    }
+  ],
+  "examples" : [
+    "FROM books \n| WHERE KQL(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;"
+  ],
+  "preview" : true,
+  "snapshot_only" : true
+}
diff --git a/docs/reference/esql/functions/kibana/docs/kql.md b/docs/reference/esql/functions/kibana/docs/kql.md
new file mode 100644
index 0000000000000..0ba419c1cd032
--- /dev/null
+++ b/docs/reference/esql/functions/kibana/docs/kql.md
@@ -0,0 +1,14 @@
+<!--
+This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+-->
+
+### KQL
+Performs a KQL query. Returns true if the provided KQL query string matches the row.
+
+```
+FROM books
+| WHERE KQL("author: Faulkner")
+| KEEP book_no, author
+| SORT book_no
+| LIMIT 5;
+```
diff --git a/docs/reference/esql/functions/layout/kql.asciidoc b/docs/reference/esql/functions/layout/kql.asciidoc
new file mode 100644
index 0000000000000..8cf2687b240c1
--- /dev/null
+++ b/docs/reference/esql/functions/layout/kql.asciidoc
@@ -0,0 +1,17 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.
+
+[discrete]
+[[esql-kql]]
+=== `KQL`
+
+preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."]
+
+*Syntax*
+
+[.text-center]
+image::esql/functions/signature/kql.svg[Embedded,opts=inline]
+
+include::../parameters/kql.asciidoc[]
+include::../description/kql.asciidoc[]
+include::../types/kql.asciidoc[]
+include::../examples/kql.asciidoc[]
diff --git a/docs/reference/esql/functions/parameters/kql.asciidoc b/docs/reference/esql/functions/parameters/kql.asciidoc
new file mode 100644
index 0000000000000..6fb69323ff73c
--- /dev/null
+++ b/docs/reference/esql/functions/parameters/kql.asciidoc
@@ -0,0 +1,6 @@
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it.
See ../README.md for how to regenerate it. + +*Parameters* + +`query`:: +Query string in KQL query string format. diff --git a/docs/reference/esql/functions/signature/kql.svg b/docs/reference/esql/functions/signature/kql.svg new file mode 100644 index 0000000000000..3f550f27ccdff --- /dev/null +++ b/docs/reference/esql/functions/signature/kql.svg @@ -0,0 +1 @@ +KQL(query) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/kql.asciidoc b/docs/reference/esql/functions/types/kql.asciidoc new file mode 100644 index 0000000000000..866a39e925665 --- /dev/null +++ b/docs/reference/esql/functions/types/kql.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +query | result +keyword | boolean +text | boolean +|=== diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index f92c895cc5b7b..02f9752d21e09 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -34,6 +34,7 @@ dependencies { compileOnly project(':modules:lang-painless:spi') compileOnly project(xpackModule('esql-core')) compileOnly project(xpackModule('ml')) + implementation project(xpackModule('kql')) implementation project('compute') implementation project('compute:ann') implementation project(':libs:dissect') @@ -50,6 +51,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) testImplementation project(path: xpackModule('enrich')) testImplementation project(path: xpackModule('spatial')) + testImplementation project(path: xpackModule('kql')) testImplementation project(path: ':modules:reindex') testImplementation project(path: ':modules:parent-join') diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec new file mode 100644 index 0000000000000..02be58efac774 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/kql-function.csv-spec @@ -0,0 +1,153 @@ +############################################### +# Tests for KQL function +# + +kqlWithField +required_capability: kql_function + +// tag::kql-with-field[] +FROM books +| WHERE KQL("author: Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; +// end::kql-with-field[] + +// tag::kql-with-field-result[] +book_no:keyword | author:text +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] +2713 | William Faulkner +2847 | Colleen Faulkner +2883 | William Faulkner +3293 | Danny Faulkner +; +// end::kql-with-field-result[] + +kqlWithMultipleFields +required_capability: kql_function + +from books +| where kql("title:Return* AND author:*Tolkien") +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2714 | Return of the King Being the Third Part of The Lord of the Rings +7350 | Return of the Shadow +; + +kqlWithQueryExpressions +required_capability: kql_function + +from books +| where kql(CONCAT("title:Return*", " AND author:*Tolkien")) +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +2714 | Return of the King Being the Third Part of The Lord of the Rings +7350 | Return of the Shadow +; + +kqlWithConjunction +required_capability: kql_function + +from books +| where kql("title: Rings") and ratings > 4.6 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 | A Tolkien Compass: Including J. R. R. 
Tolkien's Guide to the Names in The Lord of the Rings +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) +; + +kqlWithFunctionPushedToLucene +required_capability: kql_function + +from hosts +| where kql("host: beta") and cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") +| keep card, host, ip0, ip1; +ignoreOrder:true + +card:keyword |host:keyword |ip0:ip |ip1:ip +eth1 |beta |127.0.0.1 |127.0.0.2 +; + +kqlWithNonPushableConjunction +required_capability: kql_function + +from books +| where kql("title: Rings") and length(title) > 75 +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 |A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +; + +kqlWithMultipleWhereClauses +required_capability: kql_function + +from books +| where kql("title: rings") +| where kql("year > 1 AND year < 2005") +| keep book_no, title; +ignoreOrder:true + +book_no:keyword | title:text +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) +; + + +kqlWithMultivaluedTextField +required_capability: kql_function + +from employees +| where kql("job_positions: Tech Lead AND job_positions:(Reporting Analyst)") +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10004 | Chirstian | Koblick +10010 | Duangkaew | Piveteau +10011 | Mary | Sluis +10088 | Jungsoon | Syrzycki +10093 | Sailaja | Desikan +10097 | Remzi | Waschkowski +; + +kqlWithMultivaluedNumericField +required_capability: kql_function + +from employees +| where kql("salary_change > 14") +| keep emp_no, first_name, last_name, salary_change; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword | salary_change:double +10003 | Parto | Bamford | [12.82, 14.68] +10015 | Guoxiang | Nooteboom | [12.4, 14.25] +10023 | Bojan | Montemayor | [0.8, 14.63] +10040 | Weiyi | Meriste | [-8.94, 1.92, 6.97, 14.74] +10061 | Tse | Herber | [-2.58, -0.95, 14.39] +10065 | Satosi | Awdeh | [-9.81, -1.47, 14.44] +10099 | Valter | Sullins | [-8.78, -3.98, 10.71, 14.26] +; + +testMultiValuedFieldWithConjunction +required_capability: kql_function + +from employees +| where (kql("job_positions: (Data Scientist) OR job_positions:(Support Engineer)")) and gender == "F" +| keep emp_no, first_name, last_name; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword +10023 | Bojan | Montemayor +10041 | Uri | Lenart +10044 | Mingsen | Casley +10053 | Sanjiv | Zschoche +10069 | Margareta | Bierman +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index 3e92e55928d64..6039dc05b6c44 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -101,8 +101,8 @@ book_no:keyword | title:text ; -matchMultivaluedTextField -required_capability: match_function +qstrWithMultivaluedTextField +required_capability: qstr_function from employees | where qstr("job_positions: (Tech Lead) AND job_positions:(Reporting Analyst)") @@ -118,8 +118,8 @@ emp_no:integer | first_name:keyword | last_name:keyword 10097 | Remzi | Waschkowski ; -matchMultivaluedNumericField -required_capability: match_function +qstrWithMultivaluedNumericField +required_capability: 
qstr_function from employees | where qstr("salary_change: [14 TO *]") @@ -137,7 +137,7 @@ emp_no:integer | first_name:keyword | last_name:keyword | salary_change:double ; testMultiValuedFieldWithConjunction -required_capability: match_function +required_capability: qstr_function from employees | where (qstr("job_positions: (Data Scientist) OR job_positions:(Support Engineer)")) and gender == "F" diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java new file mode 100644 index 0000000000000..d58637ab52c86 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/KqlFunctionIT.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.containsString; + +public class KqlFunctionIT extends AbstractEsqlIntegTestCase { + + @BeforeClass + protected static void ensureKqlFunctionEnabled() { + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + } + + @Before + public void setupIndex() { + createAndPopulateIndex(); + } + + public void testSimpleKqlQuery() { + var query = """ + FROM test + | WHERE kql("content: dog") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(3), List.of(4), List.of(5))); + } + } + + public void testMultiFieldKqlQuery() { + var query = """ + FROM test + | WHERE kql("dog OR canine") + | KEEP id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValuesInAnyOrder(resp.values(), List.of(List.of(1), List.of(2), List.of(3), List.of(4), List.of(5))); + } + } + + public void testKqlQueryWithinEval() { + var query = """ + FROM test + | EVAL matches_query = kql("title: fox") + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("[KQL] function is only supported in WHERE commands")); + } + + public void testInvalidKqlQueryEof() { + var query = """ + FROM test + | WHERE kql("content: ((((dog") + """; + + var error = expectThrows(QueryShardException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("Failed to parse KQL query [content: ((((dog]")); + assertThat(error.getRootCause().getMessage(), containsString("line 1:11: mismatched input '('")); + } + + public void 
testInvalidKqlQueryLexicalError() { + var query = """ + FROM test + | WHERE kql(":") + """; + + var error = expectThrows(QueryShardException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("Failed to parse KQL query [:]")); + assertThat(error.getRootCause().getMessage(), containsString("line 1:1: extraneous input ':' ")); + } + + private void createAndPopulateIndex() { + var indexName = "test"; + var client = client().admin().indices(); + var CreateRequest = client.prepareCreate(indexName) + .setSettings(Settings.builder().put("index.number_of_shards", 1)) + .setMapping("id", "type=integer", "content", "type=text"); + assertAcked(CreateRequest); + client().prepareBulk() + .add( + new IndexRequest(indexName).id("1") + .source("id", 1, "content", "The quick brown animal swiftly jumps over a lazy dog", "title", "A Swift Fox's Journey") + ) + .add( + new IndexRequest(indexName).id("2") + .source("id", 2, "content", "A speedy brown fox hops effortlessly over a sluggish canine", "title", "The Fox's Leap") + ) + .add( + new IndexRequest(indexName).id("3") + .source("id", 3, "content", "Quick and nimble, the fox vaults over the lazy dog", "title", "Brown Fox in Action") + ) + .add( + new IndexRequest(indexName).id("4") + .source( + "id", + 4, + "content", + "A fox that is quick and brown jumps over a dog that is quite lazy", + "title", + "Speedy Animals" + ) + ) + .add( + new IndexRequest(indexName).id("5") + .source( + "id", + 5, + "content", + "With agility, a quick brown fox bounds over a slow-moving dog", + "title", + "Foxes and Canines" + ) + ) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + ensureYellow(indexName); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d675f772b5a3b..d9ce7fca312b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -415,6 +415,11 @@ public enum Cap { */ MATCH_FUNCTION, + /** + * KQL function + */ + KQL_FUNCTION(Build.current().isSnapshot()), + /** * Don't optimize CASE IS NOT NULL function by not requiring the fields to be not null as well. 
* https://github.com/elastic/elasticsearch/issues/112704 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 3ebb52641232e..2be13398dab2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; @@ -793,14 +794,19 @@ private static void checkNotPresentInDisjunctions( private static void checkFullTextQueryFunctions(LogicalPlan plan, Set failures) { if (plan instanceof Filter f) { Expression condition = f.condition(); - checkCommandsBeforeExpression( - plan, - condition, - QueryString.class, - lp -> (lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation), - qsf -> "[" + qsf.functionName() + "] " + qsf.functionType(), - failures - ); + + List.of(QueryString.class, Kql.class).forEach(functionClass -> { + // Check for limitations of QSTR and KQL function. + checkCommandsBeforeExpression( + plan, + condition, + functionClass, + lp -> (lp instanceof Filter || lp instanceof OrderBy || lp instanceof EsRelation), + fullTextFunction -> "[" + fullTextFunction.functionName() + "] " + fullTextFunction.functionType(), + failures + ); + }); + checkCommandsBeforeExpression( plan, condition, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index ea1669ccc7a4f..3d26bc170b723 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.aggregate.WeightedAvg; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; @@ -411,6 +412,7 @@ private static FunctionDefinition[][] snapshotFunctions() { // This is an experimental function and can be removed without notice. 
def(Delay.class, Delay::new, "delay"), def(Categorize.class, Categorize::new, "categorize"), + def(Kql.class, Kql::new, "kql"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index d59c736783172..8804a031de78c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -8,14 +8,28 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; public class FullTextWritables { public static List getNamedWriteables() { - return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, QueryString.ENTRY, Match.ENTRY); + List entries = new ArrayList<>(); + + entries.add(MatchQueryPredicate.ENTRY); + entries.add(MultiMatchQueryPredicate.ENTRY); + entries.add(QueryString.ENTRY); + entries.add(Match.ENTRY); + + if (EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()) { + entries.add(Kql.ENTRY); + } + + return Collections.unmodifiableList(entries); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java new file mode 100644 index 0000000000000..c03902373c02e --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; + +import java.io.IOException; +import java.util.List; + +/** + * Full text function that performs a {@link KqlQuery} . + */ +public class Kql extends FullTextFunction { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Kql", Kql::new); + + @FunctionInfo( + returnType = "boolean", + preview = true, + description = "Performs a KQL query. 
Returns true if the provided KQL query string matches the row.", + examples = { @Example(file = "kql-function", tag = "kql-with-field") } + ) + public Kql( + Source source, + @Param( + name = "query", + type = { "keyword", "text" }, + description = "Query string in KQL query string format." + ) Expression queryString + ) { + super(source, queryString, List.of(queryString)); + } + + private Kql(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(query()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new Kql(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Kql::new, query()); + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 9f574ee8005b2..3d6c35e914294 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -30,8 +30,8 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Queries; +import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; -import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; @@ -252,10 +252,10 @@ static boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof QueryString) { - return true; } else if (exp instanceof Match mf) { return mf.field() instanceof FieldAttribute && DataType.isString(mf.field().dataType()); + } else if (exp instanceof FullTextFunction) { + return true; } return false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 6fac7bab2bd80..1580b77931240 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import 
org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import org.elasticsearch.xpack.versionfield.Version; @@ -89,6 +91,7 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), + new KqlFunctionTranslator(), new Scalars() ); @@ -538,4 +541,11 @@ protected Query asQuery(QueryString queryString, TranslatorHandler handler) { return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), Map.of()); } } + + public static class KqlFunctionTranslator extends ExpressionTranslator { + @Override + protected Query asQuery(Kql kqlFunction, TranslatorHandler handler) { + return new KqlQuery(kqlFunction.source(), kqlFunction.queryAsText()); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java new file mode 100644 index 0000000000000..c388a131b9ab6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQuery.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.querydsl.query; + +import org.elasticsearch.core.Booleans; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; + +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.function.BiConsumer; + +import static java.util.Map.entry; + +public class KqlQuery extends Query { + + private static final Map> BUILDER_APPLIERS = Map.ofEntries( + entry(KqlQueryBuilder.TIME_ZONE_FIELD.getPreferredName(), KqlQueryBuilder::timeZone), + entry(KqlQueryBuilder.DEFAULT_FIELD_FIELD.getPreferredName(), KqlQueryBuilder::defaultField), + entry(KqlQueryBuilder.CASE_INSENSITIVE_FIELD.getPreferredName(), (qb, s) -> qb.caseInsensitive(Booleans.parseBoolean(s))) + ); + + private final String query; + + private final Map options; + + // dedicated constructor for QueryTranslator + public KqlQuery(Source source, String query) { + this(source, query, null); + } + + public KqlQuery(Source source, String query, Map options) { + super(source); + this.query = query; + this.options = options == null ? 
Collections.emptyMap() : options; + } + + @Override + public QueryBuilder asBuilder() { + final KqlQueryBuilder queryBuilder = new KqlQueryBuilder(query); + options.forEach((k, v) -> { + if (BUILDER_APPLIERS.containsKey(k)) { + BUILDER_APPLIERS.get(k).accept(queryBuilder, v); + } else { + throw new IllegalArgumentException("illegal kql query option [" + k + "]"); + } + }); + return queryBuilder; + } + + public String query() { + return query; + } + + public Map options() { + return options; + } + + @Override + public int hashCode() { + return Objects.hash(query, options); + } + + @Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + + KqlQuery other = (KqlQuery) obj; + return Objects.equals(query, other.query) && Objects.equals(options, other.options); + } + + @Override + protected String innerToString() { + return query; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 012720db9efd9..010a60ef7da15 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -257,6 +257,10 @@ public final void test() throws Throwable { "can't use MATCH function in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_FUNCTION.capabilityName()) ); + assumeFalse( + "can't use KQL function in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.KQL_FUNCTION.capabilityName()) + ); assumeFalse( "lookup join disabled for csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP.capabilityName()) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 7b2f85b80b3b6..f25b19c4e5d1c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1263,11 +1263,74 @@ public void testQueryStringFunctionsNotAllowedAfterCommands() throws Exception { ); } + public void testKqlFunctionsNotAllowedAfterCommands() throws Exception { + // Skip test if the kql function is not enabled. 
+ assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + // Source commands + assertEquals("1:13: [KQL] function cannot be used after SHOW", error("show info | where kql(\"8.16.0\")")); + assertEquals("1:17: [KQL] function cannot be used after ROW", error("row a= \"Anna\" | where kql(\"Anna\")")); + + // Processing commands + assertEquals( + "1:43: [KQL] function cannot be used after DISSECT", + error("from test | dissect first_name \"%{foo}\" | where kql(\"Connection\")") + ); + assertEquals("1:27: [KQL] function cannot be used after DROP", error("from test | drop emp_no | where kql(\"Anna\")")); + assertEquals( + "1:71: [KQL] function cannot be used after ENRICH", + error("from test | enrich languages on languages with lang = language_name | where kql(\"Anna\")") + ); + assertEquals("1:26: [KQL] function cannot be used after EVAL", error("from test | eval z = 2 | where kql(\"Anna\")")); + assertEquals( + "1:44: [KQL] function cannot be used after GROK", + error("from test | grok last_name \"%{WORD:foo}\" | where kql(\"Anna\")") + ); + assertEquals("1:27: [KQL] function cannot be used after KEEP", error("from test | keep emp_no | where kql(\"Anna\")")); + assertEquals("1:24: [KQL] function cannot be used after LIMIT", error("from test | limit 10 | where kql(\"Anna\")")); + assertEquals("1:35: [KQL] function cannot be used after MV_EXPAND", error("from test | mv_expand last_name | where kql(\"Anna\")")); + assertEquals( + "1:45: [KQL] function cannot be used after RENAME", + error("from test | rename last_name as full_name | where kql(\"Anna\")") + ); + assertEquals( + "1:52: [KQL] function cannot be used after STATS", + error("from test | STATS c = COUNT(emp_no) BY languages | where kql(\"Anna\")") + ); + + // Some combination of processing commands + assertEquals("1:38: [KQL] function cannot be used after LIMIT", error("from test | keep emp_no | limit 10 | where kql(\"Anna\")")); + assertEquals( + "1:46: [KQL] function cannot be used after MV_EXPAND", + error("from test | limit 10 | mv_expand last_name | where kql(\"Anna\")") + ); + assertEquals( + "1:52: [KQL] function cannot be used after KEEP", + error("from test | mv_expand last_name | keep last_name | where kql(\"Anna\")") + ); + assertEquals( + "1:77: [KQL] function cannot be used after RENAME", + error("from test | STATS c = COUNT(emp_no) BY languages | rename c as total_emps | where kql(\"Anna\")") + ); + assertEquals( + "1:54: [KQL] function cannot be used after DROP", + error("from test | rename last_name as name | drop emp_no | where kql(\"Anna\")") + ); + } + public void testQueryStringFunctionOnlyAllowedInWhere() throws Exception { assertEquals("1:9: [QSTR] function is only supported in WHERE commands", error("row a = qstr(\"Anna\")")); checkFullTextFunctionsOnlyAllowedInWhere("QSTR", "qstr(\"Anna\")", "function"); } + public void testKqlFunctionOnlyAllowedInWhere() throws Exception { + // Skip test if the kql function is not enabled. 
+ assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + assertEquals("1:9: [KQL] function is only supported in WHERE commands", error("row a = kql(\"Anna\")")); + checkFullTextFunctionsOnlyAllowedInWhere("KQL", "kql(\"Anna\")", "function"); + } + public void testMatchFunctionOnlyAllowedInWhere() throws Exception { checkFullTextFunctionsOnlyAllowedInWhere("MATCH", "match(first_name, \"Anna\")", "function"); } @@ -1309,10 +1372,29 @@ public void testQueryStringFunctionArgNotNullOrConstant() throws Exception { // Other value types are tested in QueryStringFunctionTests } + public void testKqlFunctionArgNotNullOrConstant() throws Exception { + // Skip test if the kql function is not enabled. + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + assertEquals( + "1:19: argument of [kql(first_name)] must be a constant, received [first_name]", + error("from test | where kql(first_name)") + ); + assertEquals("1:19: argument of [kql(null)] cannot be null, received [null]", error("from test | where kql(null)")); + // Other value types are tested in KqlFunctionTests + } + public void testQueryStringWithDisjunctions() { checkWithDisjunctions("QSTR", "qstr(\"first_name: Anna\")", "function"); } + public void testKqlFunctionWithDisjunctions() { + // Skip test if the kql function is not enabled. + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + checkWithDisjunctions("KQL", "kql(\"first_name: Anna\")", "function"); + } + public void testMatchFunctionWithDisjunctions() { checkWithDisjunctions("MATCH", "match(first_name, \"Anna\")", "function"); } @@ -1368,6 +1450,13 @@ public void testQueryStringFunctionWithNonBooleanFunctions() { checkFullTextFunctionsWithNonBooleanFunctions("QSTR", "qstr(\"first_name: Anna\")", "function"); } + public void testKqlFunctionWithNonBooleanFunctions() { + // Skip test if the kql function is not enabled. + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + checkFullTextFunctionsWithNonBooleanFunctions("KQL", "kql(\"first_name: Anna\")", "function"); + } + public void testMatchFunctionWithNonBooleanFunctions() { checkFullTextFunctionsWithNonBooleanFunctions("MATCH", "match(first_name, \"Anna\")", "function"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlTests.java new file mode 100644 index 0000000000000..d97be6b169eef --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.junit.BeforeClass; + +import java.util.List; +import java.util.function.Supplier; + +public class KqlTests extends NoneFieldFullTextFunctionTestCase { + @BeforeClass + protected static void ensureKqlFunctionEnabled() { + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + } + + public KqlTests(@Name("TestCase") Supplier testCaseSupplier) { + super(testCaseSupplier); + } + + @ParametersFactory + public static Iterable parameters() { + return generateParameters(); + } + + @Override + protected Expression build(Source source, List args) { + return new Kql(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java new file mode 100644 index 0000000000000..383cb8671053d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.LinkedList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public abstract class NoneFieldFullTextFunctionTestCase extends AbstractFunctionTestCase { + + public NoneFieldFullTextFunctionTestCase(Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + public final void testFold() { + Expression expression = buildLiteralExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assertFalse("expected resolved", expression.typeResolved().unresolved()); + } + + protected static Iterable generateParameters() { + List suppliers = new LinkedList<>(); + for (DataType strType : DataType.stringTypes()) { + suppliers.add( + new TestCaseSupplier( + "<" + strType + ">", + List.of(strType), + () -> testCase(strType, randomAlphaOfLengthBetween(1, 10), equalTo(true)) + ) + ); + } + List errorsSuppliers = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return parameterSuppliersFromTypedData(errorsSuppliers.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList()); + } + + private static TestCaseSupplier.TestCase testCase(DataType strType, String str, Matcher matcher) { + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(new BytesRef(str), strType, "query")), + "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", + DataType.BOOLEAN, + matcher + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java index b4b4ebcaacde6..f573e59ab205a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringTests.java @@ -10,61 +10,24 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.hamcrest.Matcher; -import java.util.LinkedList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - @FunctionName("qstr") -public class QueryStringTests extends AbstractFunctionTestCase { +public class QueryStringTests extends NoneFieldFullTextFunctionTestCase { public 
QueryStringTests(@Name("TestCase") Supplier testCaseSupplier) { - this.testCase = testCaseSupplier.get(); + super(testCaseSupplier); } @ParametersFactory public static Iterable parameters() { - List suppliers = new LinkedList<>(); - for (DataType strType : DataType.stringTypes()) { - suppliers.add( - new TestCaseSupplier( - "<" + strType + ">", - List.of(strType), - () -> testCase(strType, randomAlphaOfLengthBetween(1, 10), equalTo(true)) - ) - ); - } - List errorsSuppliers = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); - // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests - return parameterSuppliersFromTypedData(errorsSuppliers.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList()); - } - - public final void testFold() { - Expression expression = buildLiteralExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assertFalse("expected resolved", expression.typeResolved().unresolved()); - } - - private static TestCaseSupplier.TestCase testCase(DataType strType, String str, Matcher matcher) { - return new TestCaseSupplier.TestCase( - List.of(new TestCaseSupplier.TypedData(new BytesRef(str), strType, "query")), - "EndsWithEvaluator[str=Attribute[channel=0], suffix=Attribute[channel=1]]", - DataType.BOOLEAN, - matcher - ); + return generateParameters(); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 269b4806680a6..4612ccb425ba2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -62,6 +63,7 @@ import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchContextStats; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; import org.junit.Before; import java.io.IOException; @@ -678,7 +680,7 @@ public void testMatchFunctionMultipleWhereClauses() { * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"match":{"last_name":{"query":"Smith"}}}, * {"match":{"first_name":{"query":"John"}}}],"boost":1.0}}][_doc{f}#14], limit[1000], sort[] estimatedRowSize[324] */ - public void testMatchFunctionMultipleQstrClauses() { + public void testMatchFunctionMultipleMatchClauses() { String queryText = """ from test | where match(last_name, "Smith") and match(first_name, "John") @@ -698,6 +700,182 @@ public void testMatchFunctionMultipleQstrClauses() { assertThat(query.query().toString(), is(expected.toString())); } + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, 
long_noidx{f}#11, salary{f}#7],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7]] + * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] + * \_EsQueryExec[test], indexMode[standard], query[{"kql":{"query":"last_name: Smith"}}] + */ + public void testKqlFunction() { + // Skip test if the kql function is not enabled. + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + var plan = plannerOptimizer.plan(""" + from test + | where kql("last_name: Smith") + """, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + var expected = kqlQueryBuilder("last_name: Smith"); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#1414, gender{f}#1415, job{f}#1420, job.raw{f}#1421, langua + * ges{f}#1416, last_name{f}#1417, long_noidx{f}#1422, salary{f}#1418],false] + * \_ProjectExec[[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#1414, gender{f}#1415, job{f}#1420, job.raw{f}#1421, langua + * ges{f}#1416, last_name{f}#1417, long_noidx{f}#1422, salary{f}#1418]] + * \_FieldExtractExec[_meta_field{f}#1419, emp_no{f}#1413, first_name{f}#] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"kql":{"query":"last_name: Smith"}} + * ,{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}},"source":"emp_no > 10010"}}], + * "boost":1.0}}][_doc{f}#1423], limit[1000], sort[] estimatedRowSize[324] + */ + public void testKqlFunctionConjunctionWhereOperands() { + // Skip test if the kql function is not enabled. 
+ assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + String queryText = """ + from test + | where kql("last_name: Smith") and emp_no > 10010 + """; + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + Source filterSource = new Source(2, 36, "emp_no > 10000"); + var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); + var kqlQuery = kqlQueryBuilder("last_name: Smith"); + var expected = QueryBuilders.boolQuery().must(kqlQuery).must(range); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16], + * false] + * \_ProjectExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, half_ + * float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, unsigned_long{f}#16] + * \_FieldExtractExec[!alias_integer, boolean{f}#4, byte{f}#5, constant_k..] + * \_EsQueryExec[test], indexMode[standard], query[{"bool":{"must":[{"kql":{"query":"last_name: Smith"}},{ + * "esql_single_value":{"field":"ip","next":{"terms":{"ip":["127.0.0.1/32"],"boost":1.0}}, + * "source":"cidr_match(ip, \"127.0.0.1/32\")@2:38"}}],"boost":1.0}}][_doc{f}#21], limit[1000], sort[] estimatedRowSize[354] + */ + public void testKqlFunctionWithFunctionsPushedToLucene() { + // Skip test if the kql function is not enabled. 
+ assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + String queryText = """ + from test + | where kql("last_name: Smith") and cidr_match(ip, "127.0.0.1/32") + """; + var analyzer = makeAnalyzer("mapping-all-types.json"); + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS, analyzer); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + Source filterSource = new Source(2, 36, "cidr_match(ip, \"127.0.0.1/32\")"); + var terms = wrapWithSingleQuery(queryText, QueryBuilders.termsQuery("ip", "127.0.0.1/32"), "ip", filterSource); + var kqlQuery = kqlQueryBuilder("last_name: Smith"); + var expected = QueryBuilders.boolQuery().must(kqlQuery).must(terms); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#1158, gender{f}#1159, job{f}#1164, job.raw{f}#1165, langua + * ges{f}#1160, last_name{f}#1161, long_noidx{f}#1166, salary{f}#1162],false] + * \_ProjectExec[[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#1158, gender{f}#1159, job{f}#1164, job.raw{f}#1165, langua + * ges{f}#1160, last_name{f}#1161, long_noidx{f}#1166, salary{f}#1162]] + * \_FieldExtractExec[_meta_field{f}#1163, emp_no{f}#1157, first_name{f}#] + * \_EsQueryExec[test], indexMode[standard], + * query[{"bool":{"must":[{"kql":{"query":"last_name: Smith"}}, + * {"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}},"source":"emp_no > 10010@3:9"}}], + * "boost":1.0}}][_doc{f}#1167], limit[1000], sort[] estimatedRowSize[324] + */ + public void testKqlFunctionMultipleWhereClauses() { + // Skip test if the kql function is not enabled. 
+ assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + String queryText = """ + from test + | where kql("last_name: Smith") + | where emp_no > 10010 + """; + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + Source filterSource = new Source(3, 8, "emp_no > 10000"); + var range = wrapWithSingleQuery(queryText, QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", filterSource); + var kqlQuery = kqlQueryBuilder("last_name: Smith"); + var expected = QueryBuilders.boolQuery().must(kqlQuery).must(range); + assertThat(query.query().toString(), is(expected.toString())); + } + + /** + * Expecting + * LimitExec[1000[INTEGER]] + * \_ExchangeExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7],false] + * \_ProjectExec[[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gender{f}#4, job{f}#9, job.raw{f}#10, languages{f}#5, last_na + * me{f}#6, long_noidx{f}#11, salary{f}#7]] + * \_FieldExtractExec[_meta_field{f}#8, emp_no{f}#2, first_name{f}#3, gen] + * \_EsQueryExec[test], indexMode[standard], query[{"bool": {"must":[ + * {"kql":{"query":"last_name: Smith"}}, + * {"kql":{"query":"emp_no > 10010"}}],"boost":1.0}}] + */ + public void testKqlFunctionMultipleKqlClauses() { + // Skip test if the kql function is not enabled. + assumeTrue("kql function capability not available", EsqlCapabilities.Cap.KQL_FUNCTION.isEnabled()); + + String queryText = """ + from test + | where kql("last_name: Smith") and kql("emp_no > 10010") + """; + var plan = plannerOptimizer.plan(queryText, IS_SV_STATS); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var query = as(field.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(1000)); + + var kqlQueryLeft = kqlQueryBuilder("last_name: Smith"); + var kqlQueryRight = kqlQueryBuilder("emp_no > 10010"); + var expected = QueryBuilders.boolQuery().must(kqlQueryLeft).must(kqlQueryRight); + assertThat(query.query().toString(), is(expected.toString())); + } + // optimizer doesn't know yet how to break down different multi count public void testCountFieldsAndAllWithFilter() { var plan = plannerOptimizer.plan(""" @@ -1166,4 +1344,8 @@ private Stat queryStatsFor(PhysicalPlan plan) { protected List filteredWarnings() { return withDefaultLimitWarning(super.filteredWarnings()); } + + private static KqlQueryBuilder kqlQueryBuilder(String query) { + return new KqlQueryBuilder(query); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQueryTests.java new file mode 100644 index 0000000000000..8dfb50f84ac1e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/KqlQueryTests.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.querydsl.query; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.tree.SourceTests; +import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.kql.query.KqlQueryBuilder; + +import java.time.ZoneId; +import java.time.zone.ZoneRulesException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.hamcrest.Matchers.equalTo; + +public class KqlQueryTests extends ESTestCase { + static KqlQuery randomKqkQueryQuery() { + Map options = new HashMap<>(); + + if (randomBoolean()) { + options.put(KqlQueryBuilder.CASE_INSENSITIVE_FIELD.getPreferredName(), String.valueOf(randomBoolean())); + } + + if (randomBoolean()) { + options.put(KqlQueryBuilder.DEFAULT_FIELD_FIELD.getPreferredName(), randomIdentifier()); + } + + if (randomBoolean()) { + options.put(KqlQueryBuilder.TIME_ZONE_FIELD.getPreferredName(), randomZone().getId()); + } + + return new KqlQuery(SourceTests.randomSource(), randomAlphaOfLength(5), Collections.unmodifiableMap(options)); + } + + public void testEqualsAndHashCode() { + for (int runs = 0; runs < 100; runs++) { + checkEqualsAndHashCode(randomKqkQueryQuery(), KqlQueryTests::copy, KqlQueryTests::mutate); + } + } + + private static KqlQuery copy(KqlQuery query) { + return new KqlQuery(query.source(), query.query(), query.options()); + } + + private static KqlQuery mutate(KqlQuery query) { + List> options = Arrays.asList( + q -> new KqlQuery(SourceTests.mutate(q.source()), q.query(), q.options()), + q -> new KqlQuery(q.source(), randomValueOtherThan(q.query(), () -> randomAlphaOfLength(5)), q.options()), + q -> new KqlQuery(q.source(), q.query(), mutateOptions(q.options())) + ); + + return randomFrom(options).apply(query); + } + + private static Map mutateOptions(Map options) { + Map mutatedOptions = new HashMap<>(options); + if (options.isEmpty() == false && randomBoolean()) { + mutatedOptions = options.entrySet() + .stream() + .filter(entry -> randomBoolean()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + while (mutatedOptions.equals(options)) { + if (randomBoolean()) { + mutatedOptions = mutateOption( + mutatedOptions, + KqlQueryBuilder.CASE_INSENSITIVE_FIELD.getPreferredName(), + () -> String.valueOf(randomBoolean()) + ); + } + + if (randomBoolean()) { + mutatedOptions = mutateOption( + mutatedOptions, + KqlQueryBuilder.DEFAULT_FIELD_FIELD.getPreferredName(), + () -> randomIdentifier() + ); + } + + if (randomBoolean()) { + mutatedOptions = mutateOption( + mutatedOptions, + KqlQueryBuilder.TIME_ZONE_FIELD.getPreferredName(), + () -> randomZone().getId() + ); + } + } + + return Collections.unmodifiableMap(mutatedOptions); + } + + private static Map mutateOption(Map options, String optionName, Supplier valueSupplier) { + options = new HashMap<>(options); + options.put(optionName, randomValueOtherThan(options.get(optionName), valueSupplier)); + return options; + } + + public void testQueryBuilding() { + KqlQueryBuilder qb = getBuilder(Map.of("case_insensitive", "false")); + 
assertThat(qb.caseInsensitive(), equalTo(false)); + + qb = getBuilder(Map.of("case_insensitive", "false", "time_zone", "UTC", "default_field", "foo")); + assertThat(qb.caseInsensitive(), equalTo(false)); + assertThat(qb.timeZone(), equalTo(ZoneId.of("UTC"))); + assertThat(qb.defaultField(), equalTo("foo")); + + Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy"))); + assertThat(e.getMessage(), equalTo("illegal kql query option [pizza]")); + + e = expectThrows(ZoneRulesException.class, () -> getBuilder(Map.of("time_zone", "aoeu"))); + assertThat(e.getMessage(), equalTo("Unknown time-zone ID: aoeu")); + } + + private static KqlQueryBuilder getBuilder(Map options) { + final Source source = new Source(1, 1, StringUtils.EMPTY); + final KqlQuery kqlQuery = new KqlQuery(source, "eggplant", options); + return (KqlQueryBuilder) kqlQuery.asBuilder(); + } + + public void testToString() { + final Source source = new Source(1, 1, StringUtils.EMPTY); + final KqlQuery kqlQuery = new KqlQuery(source, "eggplant", Map.of()); + assertEquals("KqlQuery@1:2[eggplant]", kqlQuery.toString()); + } +} diff --git a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java index 5dff9126b6be4..e2817665d8f79 100644 --- a/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java +++ b/x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilder.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -26,6 +27,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.kql.parser.KqlParser; import org.elasticsearch.xpack.kql.parser.KqlParsingContext; +import org.elasticsearch.xpack.kql.parser.KqlParsingException; import java.io.IOException; import java.time.ZoneId; @@ -37,9 +39,9 @@ public class KqlQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "kql"; public static final ParseField QUERY_FIELD = new ParseField("query"); - private static final ParseField CASE_INSENSITIVE_FIELD = new ParseField("case_insensitive"); - private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); - private static final ParseField DEFAULT_FIELD_FIELD = new ParseField("default_field"); + public static final ParseField CASE_INSENSITIVE_FIELD = new ParseField("case_insensitive"); + public static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); + public static final ParseField DEFAULT_FIELD_FIELD = new ParseField("default_field"); private static final Logger log = LogManager.getLogger(KqlQueryBuilder.class); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, a -> { @@ -151,12 +153,16 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep @Override protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throws IOException { - KqlParser parser = new KqlParser(); - QueryBuilder rewrittenQuery = parser.parseKqlQuery(query, createKqlParserContext(context)); + try { + KqlParser parser = new KqlParser(); + QueryBuilder 
rewrittenQuery = parser.parseKqlQuery(query, createKqlParserContext(context));
- log.trace(() -> Strings.format("KQL query %s translated to Query DSL: %s", query, Strings.toString(rewrittenQuery)));
- return rewrittenQuery;
+ log.trace(() -> Strings.format("KQL query %s translated to Query DSL: %s", query, Strings.toString(rewrittenQuery)));
+ return rewrittenQuery;
+ } catch (KqlParsingException e) {
+ throw new QueryShardException(context, "Failed to parse KQL query [{}]", e, query);
+ }
}
@Override
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml
index 72c7c51655378..f7dd979540afa 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml
@@ -92,7 +92,7 @@ setup:
- gt: {esql.functions.to_long: $functions_to_long}
- match: {esql.functions.coalesce: $functions_coalesce}
# Testing for the entire function set isn't feasible, so we just check that we return the correct count as an approximation.
- - length: {esql.functions: 121} # check the "sister" test below for a likely update to the same esql.functions length check
+ - length: {esql.functions: 122} # check the "sister" test below for a likely update to the same esql.functions length check
---
"Basic ESQL usage output (telemetry) non-snapshot version":
From fd6e8857bc9bf56895b3c53996a27c226a3a80fa Mon Sep 17 00:00:00 2001
From: Philippus Baalman
Date: Mon, 25 Nov 2024 14:50:09 +0100
Subject: [PATCH 221/386] Mention `bbq_hnsw` for `m` and `ef_construction` options in docs (#117022)
---
docs/reference/mapping/types/dense-vector.asciidoc | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc
index 4c16f260c13e7..e6e11d6dd539f 100644
--- a/docs/reference/mapping/types/dense-vector.asciidoc
+++ b/docs/reference/mapping/types/dense-vector.asciidoc
@@ -338,12 +338,12 @@ by 32x at the cost of accuracy. See <
Date: Mon, 25 Nov 2024 14:53:48 +0100
Subject: [PATCH 222/386] [ML] Unmuting ForecastIT » testOverflowToDisk for Windows (#114807)

Running the test on Jenkins caused flakiness on Windows. See #44609. Since we moved to Buildkite, I am unmuting this test, assuming that Buildkite is not creating such deep directory structures as Jenkins (see this comment). We will mute this test again if this assumption turns out to be wrong.

Closes #44609.
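For context, the guard being removed in the diff below is the standard randomized-testing OS mute. A minimal sketch of the pattern, with the import and the assumeFalse call taken from the hunk that follows and the rest of the test body elided:

    import org.apache.lucene.util.Constants;

    public void testOverflowToDisk() throws Exception {
        // assumeFalse(message, condition) reports the test as skipped rather than
        // failed when the condition is true, so this muted the test on Windows only.
        assumeFalse("https://github.com/elastic/elasticsearch/issues/44609", Constants.WINDOWS);
        // ... forecast/overflow assertions run on every other platform ...
    }

Deleting the assumeFalse line (and the now-unused Constants import) re-enables the test on Windows as well.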
--- .../org/elasticsearch/xpack/ml/integration/ForecastIT.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 447bca4f4e688..94fbde69e29c7 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.ml.integration; -import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -222,8 +221,6 @@ public void testMemoryStatus() { } public void testOverflowToDisk() throws Exception { - assumeFalse("https://github.com/elastic/elasticsearch/issues/44609", Constants.WINDOWS); - Detector.Builder detector = new Detector.Builder("mean", "value"); detector.setByFieldName("clientIP"); From c184b2277769854a102c02d70c7018a50056395c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 25 Nov 2024 15:00:44 +0100 Subject: [PATCH 223/386] Stop using _source.mode attribute in builtin templates (#117448) Use index.source.mode index setting in builtin templates instead of the deprecated _source.mode mapping attribute. --- .../metrics-apm@mappings.yaml | 2 -- .../metrics-apm@settings.yaml | 16 +++++++++------- .../apm-data/src/main/resources/resources.yaml | 2 +- .../component-template/profiling-events.json | 8 +++++--- .../profiling-executables.json | 10 ++++++---- .../component-template/profiling-metrics.json | 8 +++++--- .../profiling-stacktraces.json | 8 +++++--- .../index-template/profiling-sq-executables.json | 10 ++++++---- .../index-template/profiling-sq-leafframes.json | 10 ++++++---- .../ProfilingIndexTemplateRegistry.java | 2 +- 10 files changed, 44 insertions(+), 32 deletions(-) diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml index af28cbb7415a0..660db3a6b0e2e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@mappings.yaml @@ -5,8 +5,6 @@ _meta: managed: true template: mappings: - _source: - mode: synthetic properties: processor.event: type: constant_keyword diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index 819d5d7eafb8e..d8fc13bce79b1 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -5,10 +5,12 @@ _meta: managed: true template: settings: - codec: best_compression - mapping: - # apm@settings sets `ignore_malformed: true`, but we need - # to disable this for metrics since they use synthetic source, - # and this combination is incompatible with the - # aggregate_metric_double field type. 
- ignore_malformed: false + index: + codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is incompatible with the + # aggregate_metric_double field type. + ignore_malformed: false + source.mode: synthetic diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index fa209cdec3695..9484f577583eb 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 11 +version: 12 component-templates: # Data lifecycle. diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index f90d2202db0d3..c7424571dd678 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -15,14 +15,16 @@ "container.name", "process.thread.name" ] + }, + "mapping": { + "source": { + "mode": "synthetic" + } } }, "codec": "best_compression" }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": ${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.events.version}, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json index f1e5e01d50c16..ac72a03202646 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json @@ -5,13 +5,15 @@ "auto_expand_replicas": "0-1", "refresh_interval": "10s", "hidden": true, - "lifecycle.rollover_alias": "profiling-executables" + "lifecycle.rollover_alias": "profiling-executables", + "mapping": { + "source": { + "mode": "synthetic" + } + } } }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": ${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.executables.version}, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json index 35f53a36b2d0b..bb893a07c70a1 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json @@ -10,14 +10,16 @@ "@timestamp", "host.id" ] + }, + "mapping": { + "source": { + "mode": "synthetic" + } } }, "codec": "best_compression" }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": 
${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.metrics.version}, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json index 6c96fb21673ae..1170e3a32d8e2 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json @@ -11,13 +11,15 @@ "field": [ "Stacktrace.frame.ids" ] + }, + "mapping": { + "source": { + "mode": "synthetic" + } } } }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": ${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.stacktraces.version}, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json index 71c4d15989b7a..d5d24a22fc58e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json @@ -7,13 +7,15 @@ "index": { "auto_expand_replicas": "0-1", "refresh_interval": "10s", - "hidden": true + "hidden": true, + "mapping": { + "source": { + "mode": "synthetic" + } + } } }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": ${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.sq.executables.version}, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json index 20849bfe8f27d..b56b4b2874743 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json @@ -7,13 +7,15 @@ "index": { "auto_expand_replicas": "0-1", "refresh_interval": "10s", - "hidden": true + "hidden": true, + "mapping": { + "source": { + "mode": "synthetic" + } + } } }, "mappings": { - "_source": { - "mode": "synthetic" - }, "_meta": { "index-template-version": ${xpack.profiling.template.version}, "index-version": ${xpack.profiling.index.sq.leafframes.version}, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index 7d8a474453c4c..71e8dcbff4ee6 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -54,7 +54,7 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 11: Added 'profiling.agent.protocol' keyword mapping to profiling-hosts // version 12: Added 'profiling.agent.env_https_proxy' keyword mapping to profiling-hosts // version 
13: Added 'container.id' keyword mapping to profiling-events
- public static final int INDEX_TEMPLATE_VERSION = 13;
+ public static final int INDEX_TEMPLATE_VERSION = 14;
// history for individual indices / index templates. Only bump these for breaking changes that require creating a new index
public static final int PROFILING_EVENTS_VERSION = 5;
From fa9f2bff0edafae15816f9a77df979e3a38f4e9c Mon Sep 17 00:00:00 2001
From: florent-leborgne
Date: Mon, 25 Nov 2024 15:13:23 +0100
Subject: [PATCH 224/386] Docs for starred esql queries in Kibana (#117468)
---
docs/reference/esql/esql-kibana.asciidoc | 17 ++++++++++++++++-
.../esql/esql-discover-query-history.png | Bin 290854 -> 217954 bytes
.../esql/esql-discover-query-starred.png | Bin 0 -> 209828 bytes
3 files changed, 16 insertions(+), 1 deletion(-)
create mode 100644 docs/reference/images/esql/esql-discover-query-starred.png
diff --git a/docs/reference/esql/esql-kibana.asciidoc b/docs/reference/esql/esql-kibana.asciidoc
index 85969e19957af..87dd4d87fa8e3 100644
--- a/docs/reference/esql/esql-kibana.asciidoc
+++ b/docs/reference/esql/esql-kibana.asciidoc
@@ -106,12 +106,27 @@ detailed warning, expand the query bar, and click *warnings*.
==== Query history
You can reuse your recent {esql} queries in the query bar.
-In the query bar click *Show recent queries*.
+In the query bar, click *Show recent queries*.
You can then scroll through your recent queries:
image::images/esql/esql-discover-query-history.png[align="center",size="50%"]
+[discrete]
+[[esql-kibana-starred-queries]]
+==== Starred queries
+
+From the query history, you can mark some queries as favorites to find and access them faster later.
+
+In the query bar, click *Show recent queries*.
+
+From the **Recent** tab, you can star any queries you want.
+
+In the **Starred** tab, find all the queries you have previously starred.
+
+image::images/esql/esql-discover-query-starred.png[align="center",size="50%"]
+
+
[discrete]
[[esql-kibana-results-table]]
=== The results table
diff --git a/docs/reference/images/esql/esql-discover-query-history.png b/docs/reference/images/esql/esql-discover-query-history.png
index ff1d2ffa8b280eff5ba369c10e3e3db1fab4431f..eb064684af700cc791312dfe8aba2ee1122e4a53 100644
GIT binary patch literal 217954
zS+!q*Fz^&Gu*kwNaF~AoWwX9SI1m-lV8DLc^h(Wi6;=NGv;S=XC@@==cAW-PD$*eO zwfQ@dtlR$@_&+8Ztq%j+uuk(1?tcbNM&aj|io`FD3j5zL`8U*<7ZenU^YBvtE5?M| zpTT2BpZNs^Q~qQA|258qL9p=aF)7Q)|1+K3{9xcfWXG6q;r}P{KFv^?#m`S4ghS&0 zUv7zitqKEESa|pu8CBqay}*Bh=tIDamb3l%>VMt~%dl|Bz3%N|otXbKVJI4)IO3Z}EDwfT@JqP=nJ}*jgoRleo=5%*YyTyI{E(0-D7IiRu*CjbUi~Ks|8*#i zsJVY&{U=bu$uC~NrWwt?68B&-jEzapxxRN0Ll$nofNySQQ{9v|jN_D}ImIuh`v-~Y z%i0eI-d3--8_YTgCbP_k<#*C58Prv66T<83Q>b;Lsx!TL!O4`K=Ak|yQG;@wThxQx zn;VKjAoic+jVdc9DUOoE$@4-xWM=3J$QYMnyHYA|%}#!ue0cvGwk!V2@0V;t3!Zan zL4K`b?YRgy)5uvo2Pgur3%IX5 zEEj>$@plwp*!yzpXd_QVpH}Y2_~QNMJ1rmH?G#!R{v&x1bpNh4!J2&hCo}v5!~Y~8 zY$AcQw0zdBiUZ4%k`pEzroDhaj4%1=@cM=asaVwSoxdaf9=$%NdpmJr}2=7(&C3vSpjNDSkYA>FXI0L2pG6ata|68Z{9?rjF^7H zjkw6@g9$7>5hA}=sV~v)IUyqJbvmKZ7~msJv4Cva|J5H`u-t0g?P>6V8pfcm?vdR& ztm;>%=xMV+DW4&D0`fpUGld_9=g&Qqkw4$e)()2!7tp+Zt*Wn2 zBH)3m^!)kr)sz&OvN6u{S7->$&CR`Yb19YPL!yF$g2sGmM2n<#f2OIl5|kfv^YbGr zMB5*r&6y=!=D)TE47?3i@9_$(UQsp+JjHt{|NPjZnF%5|5SK%PxfyzQ#0!k<4es&= zaU{O2J|z>BphgAd$N&cw6%||@oIPxx?N8Rvudc6S+S-IZg32zmM~MH#@yl3GBD2$^ zlY(NI1FB$BC8LOc;|v#ic`WJYcLr5ij_ECGhx|gTY-p%_o5xA3Hl!qACT_lJxC=CT z)4g0v-N%)VmyNDRKFNzX+>T2A?d>rA2}1$^DJLa|bD}@VRJT^Bz)gP)e=CaoP8BLa zrFOG`e^1cQ3<{Wu_!}ZT2{lwhXPX*)UjTQ2N4LZB4ZZ_zSQO}VlO^$3z6mXh>Zz!V z;u2oBfA;uPR^KQ*oXXQ_1DY{tN!cdd>0rQsPQ0YG|EZHYSNK|7l&CYk>) zdqTzJ3KXY2sV}+x{G_0U!q#XMK|sJ?49)$d6w%1A8@DT-(7vBNiNVCg71)2cv;!I_ z+V!-k?;0pgFIn^x^)@hZ@Bj2kJT$oe!!MbAk#dvna&e0Lvbv2(-wCesnlc+3D+%sB=0fE`o$EyqaoNJJ;OmNX(=+ znVecK2NVx(Ypk*2;^NjP1eZ6{DVH}kB(Beb+`8YtLHRR%^Pjd2uFE_w92t~7QhWGi zQK1Hsy$NjkhRr-{Pu$FK329(4IMn>$-M#@>{hi`shlUtvhmvACQ`?1T{Pm}+$#k>% zT4bW8gGPZAalIY+CtJyPIq6|uT1F|Oaf7af%`~&~rkuCtD3R63dH3GI5sXczUf>5n zL(cxQ(J8G4CHhz#F?WDp&vWSX9HMZaL0NcSIddg(4v5mzM=5A9A!EC$fo##?Ktd5$ z)k|pCGz%(*&I(|S+?T-3AquLs_*PcoS9&yrswDB&wlnE=(wYmjd0_dNE|luwE5{A~ zex@a-p?`BXj>w!gD+Jws{YMVmQo_S)Wb}yee=;Y=)S)XAfCM06SU}1I)HC11GH8`C znPD2$OZp4zqUg7HM4*!k%EvQm<-sAG=5Lf{+!`YH3UHIMJgL9;-2SL`rOLCFT*v0t z-c8OEpNZMfdmNAz_e^)2i&1F>5Y`7HGy>xW(-Oh5dUiIkUi~FbUg(rO^q;i!5Y|2W zXZJ9p*<(b7hmkvno1mPJ;jy*}Y;?b6iD%YGZEk5PZNuaoha$g$BfC4|PsXvI6w13- z6s~8R%)Bn=FVpy3BZpI;XESP*sg7nz&{eC`#nTz4GH4Zw!@|N^?%rRWnC#9M5_ffV z(L=mFM}$2tCbh<@F}$jd@R?~YS59ZGfp|D(NJ6OaS=m{Zn}ajE6?I5x6u!Q`&P$}+ z!x?SDVu)?|F5a6Zgz7ocADovoxW}j{Tm{%71IykOV0%qgpx_}NYd|VsVddbWa59vS zjxMp=`q}b5ol%Ylm|PWFA3QI)6qfMwQU_vRF)AGC519&(eDbS8s1{+dzA?7;ww`s` z;@$jd*R43~!6#@T-@IPLh%5K)jn=k_)h&gX^k%o!PIGgcg8<;uI?BeNyx1C7LOPrV`9RGmua5o19`wc?=CDOa`OH!ulPQ%qve5eTNT~?7rGyAf+)vnauIt!SzqvxO@m_Ng92-mEmi-x@DBpyZK76 zgM|A~jX|qbKlEbX{q%f?{rurOmQ}1qu}1F6T2Ik!vx5fwbr!jxdgSL%>$#~sjzp<- zJ&K9L3v_I%kQWtELWB}ERw3ZN1O3&twVj`z!(@vM4GnRLbjOBW?Y$hQ%cYc;1n(z_ znGGwWKYr{y!k@1%F}$zBj*b)Co-9UcS^U;A?>sYIjoh2ofF+SD1yn~v54Tj*cro_~ zk=jKY+mYN~d(G>0-OA=+S01k3Vv*S+edogTQP-<#{5^lzVv$ITA%nU7pc~+%ji6xq zXG{MFbTh;2q6mk0js?g%V4Y4{3yovOv$88KfJj`HNDVd za`g6m^YlgKoSX9p*z5Gl+A!o^LYdq!Km86($!xBwxoXPM_a{OPGk1gP-|y``kb4ap zh(y8?ODlq7-(d8f+l=0=2lyb|B zGuq)y_N|`N)-dVDuaR#@s~5|{iosc}j@dO%&kT^{l0dMk7J z)LoN_oM)N-w5ApyqD$XmQ0LyE)7uQWZ(f@@Ezz4zif5&+tHjTKBLNHO2_&SfXwdnTv$1>t- zire`GJk{H?Cq@91`Rf8GrDaftW+^^Q5g|UiFx0N|b)5oe{tMJ(|>CqJpmM7N6 z))^C)=T?x#^?CS=^~52Yq2sr=o;>xUd~BagZA%h{2j1qWD-p-AyOPgDmX>dvU5!+H zGecv!UGw5lcsN5esE2&jjqejVV1!q`fNPDT&-QU?nDMk7pW9<0)(;|`#(t7uo}Zzp zR%KBIm%;KFk!M8|pTEL^Woe{t^@kjhW!Y`*MJ2L|aX3?jz$ZTZaIw*wY=}>9x zA%g=Qo}`xSP?=|1mC^S zY2`{--qN9ed0GYdC;nQsivT;#Ha%0*H4C9QtmO;?BSz&T6TY^61{XC>%9^}4hC0sA zr+VvOqr7z07KlQd#)&>6wDt`Ajk0gBJO?k!gJ;$eQK>^C#!2trBabq&T3ho^yLe#q zT8^TV^GOhM0ktqLe2Z}qkTz5=$pu35)jn*+AGG_Cbc*n;VP8{(mPyTL5p&zA<;ff3 
zQV9M+VOEK&H0`s|pM3wgn#+hwSr&o?=8CE}^1q|(gh&^5c6O@%YLIghI`yw4u@P4E zMPb$n_sSTGq%CjpVE$;k)Kb2WPakKr6U$F)luh0}((t0Ou=>8rEPJ&JrBAJ&F$lS1 z*dV6~ayTAKtJL-DEJScqqqWaUS!I773(`n+*q5vQ<)bm@dn1%KF9pjCz+tgoQzK|t zMF=V{;+SSX|0y1^Blg5K%}%dbDf@DPOT}wCTdv#1%s(q3T@>!U@u+p1%v7lclB93R&$l7IX%}kFa=Eqsg;Csws z|M0=-tXrXrlrxwe{tO30=R zwQmX}hlpr;%DGI9*UH--XwWIr`5j_@{v0miymIdyE7nrls4{F|H3Fv#NIl-2m7k&> zEk)(%Q_X8<3d0r|wWYf$Jj_%y56@1|)g6tH`|f8#?WU1Zy}EdvXJTSvpEVr1!h1@Y zrx4fOBx`*i7Zd=B8Cb=+^LejN9-gD1XN~2`(#vh@Cg(Eq-&KMRKEEK?j`?+Yc9Gj` zEZI?{QB3#v5+XMzrB@yi<|#gq$ed4W%wx=wCjKUFi9IjWiwtY}6&(YwuDjBw1SXkW z*`QD}owKCI&JR8MloWM`%NjR~@({VrO}tZ;agh6tx;XB=^Cmt}$T^lwP&P9Z;o0^^ zrF)^nOk5&Ym>b1Ii-31nq<#qQBmb>|*rCn2afR^imGOuE-83(%puQ9(74e)ySDltU z{Cv>0<4AY*^iIb2IVXy{bdW+4rthw5Rq%GuL*m*dnQZkoD=Sjto!Kei1$$i0NAws6I-2K$BIrzbvV~c7wlX znB`?&?63=gjAu$x@3?1-hFD`*M7h-lu}Y1dSbvG@pk>E!Fdt*T?sP`QOEu`kc;77Z@h5Ve7>rw*cYe$oX zQPwyj+yUE*H$ychJA#@^?FFlSA2tf)zaN5)4B0sMpx&I-wyQ7xhej9hPp7-BFbLha zbCbr9#4~F|6@QY&xL3slUHm?x)7jYDa|U%5C;m-0h+R{?H|{$_vatPcM_*4SJ11ku z$g?i+wm6G;6b2t2Wn!#lD7Q2KP-j=uh!F0#tIV$p#LG(7*V6nCyJ$v-UyPoQSKL;M z4TdMChkAjv`OM+hOX5LaJS_2}*G?W9leUYK8oy{UNpS#Gi}0YX#_KzuYh9GBH(7L? zh6UmZ#M2tJcA-gko8EP*Gb}pI$j)9Zxs^jg*x1+>_uW9J-EbD+{B%rWLUwapo^B8I z8j$tbSRL*p=_vEVcy6%V@bPCOa&bQyL>YP;%bXkE3m~}r1lAcvmrWRk;*J6P5br0GBQh;)XO)d*o{i!;S2o?{iPGdQ&mXNZAO4-V3 zE}))Tn-BPrhYkq`I%sN%fqUkMY(x&XV~PObZ=A3V5yZaPI_!V}bNN=E^HFJ!LfbUc zt@PkYTb~r%p4PH;d5f>3NVLm6iDUV}Xb z^ED)kZT)$R(MRss9GL@j*tqQsVMzw2AAqN58I(u6!7dIHd^T+m@zM(?z_ zF2PnpKo$L-Er$$J>F-W#`N84&=G8de& zCakYETu_=m+$4_ARC$cwP&yZWu=G}rE;W5G&&P7$vY*P`OY3-QpIIBpXlv9&E`_jY zl?x9O*;J|Y z1Ghu6<8j=@bVmTabd?xsA+L&^*AWlb1vU&b)Ia^NFvvwQ*(b(mO0E(#l1-^Y)m*b*(I`$w0s8A7qYm+YQ)LDU5(eHRpA-v4%m(^ z5Yk!lK~|?OoG!?>=rxqN*Td=^!tj}Na!CdghN1$|cXMs>aide(uB6PS-pycx$p9pD zYr>>Rd!{QhtLCiQG=i98_exp(W!@V}TZos3qBB*ug+Y%q_Zi4(2PzN+Acvi5RFPJ3 zLFy{uW@}pU?a0U5iUObq_t(OL? zK&)6rO)=f#spV0%Mr@Trz-p*^_<@ClmhtB5YJ|W@F=w?atWPZGK+NH-=dAl%PkmXl ziERvp;TQmvtZ<;csd5#qF*gQtwiI4xUf^8OBSK}N%eA&v1uhHN&D?rLXRS5)G@@(Z zm+@*RSbF%%oL=6X-YLIjrBeo{z_<%#R;>TFSO1NQ-WRd2M&1HC*CaY*b=^sOL`K$@ z@^}1sN{l&heui&)srMvyaw->+)*6xzT%*cEfDK#9Y|4 z0Y&MrqLN-j@SLVwF4Uq@@_3X)Hi9c7B?=wQyxSkHhTd7d;SN9Sp2d%0B>MV!ID$@T z>JE@HH_ZAI$ZwL?hCD72Sz0no;<4?)q><*sFk`Z>OC9awrUrpL6Xfyng!gW|XwlD5*^HqjtsWW@UU7=7~n_E(?&7+Wi5{pjVk%kfwT;_J_qBo)?MZ2YisY16yOw zMyk&*=%BAhow>8j!psE}SmZ{rZTEOD>m0j1pDv!Z9qH$6D_UvQWF}JEKrxHss%Gy! zflYrN1y~s%eMyn`)0gTrHSjT@NZ+)F{Hw@t<%MbQnt^{RIipjMai8nZIX+nf#JIJGRk?*E zHZ#>)_rej#B5BdoxB+)?zZRVmyT? zL9dL7n%}2{Nv!chhh&XlR0e4Yee=A5uQ+-{RnR5Z*8fv(<@Bt&mW1CeSnyig>*2BF`0V}k zPYD$If&RlQll;aB7A^O!w7Q*m{Gs?+`pU6^mK)}khEqCPt|)s`3E@^|Prxd7vH9bQqDjn@KwN37m9r81dx`-nE< zvLsIEr;L5AKiC-19AP_&Vrniq+AFWiZ*Ldob=rP&PXm`lG8|_%wQpBw|DoeeBtHPG zrS+n>uflt*h`**R#_0LOF?Px0?M6L+ZgY1q;iwlUdlu)T2kkveLhbs`s_K&x1J?LK zg|{a%{h-{Iw(YxapB9inT8uK%^!C*%gy638f z;p5hVHy^>`t zm>iqE*kR>;YSS>ArsbfW)%)lUOA^&JHk%ER8}k7|Gd=5XBqMjy^c0EF=bv;HZsoj} zj?IihAPP7YadrFYhzVv!U26Ri(Z7G3U#&#IY7t$Kla$6%*$}hSU9+G1)!4 z1-I&bTXINMxe31m!gsfeq$yV4IVg?8e8$AO#lh&66kig*5Qj;>^GsFU{z+8jcFf zPX$a4uJlGN;d)z|qR-#=FCM}YY*7JsGbRYMh1@t|>^7tlO6!;NZP}Itr zKb#GX+x3Th5xWheKX2*N7miG%xVJp8lU*>Dc%hQWtxL?&>;D!#ePe*QCnNl)C#|8^`E~XTZv18z+87(!^QLu9(6LYsR_ROuc|c&H`KD z0uoWb%!wEuBJfJdlN8*}Z7S?~ddeo0R%in@EQhA(<**O_RgUqmSiS!_eY7Sl&y>MU z2b*ris#NUAXB4~~B2H^%AeK2Jz4=CeH08AoSBP=f7p_txwQ_Pnua}%Q6T&lFtgW?{ z3yK1?+`2U$@rbDVtgi7%3vT#4EF>Pg;}GVHRIXH-XP{k~iXU>Nr!mF)f#}|e$;~B? 
z^C~=aKJqy1Wx$zAg~PEr_Ix#HIAg+laU}5E?XgE=fL@PlfiZ+vMCW1!^G!Bgh1t-2t_#6-_7Sv*W%;Tx9j@vR>TuyXX<^0@mrna=a z>}B-rH#_EC-QwUM%6T?|3a>OtRijAnml)N*2`idqbV$J-ZHq;0eGB<;O>>RGqpPx-P8ly01l#ryfog`UYU{x-b56d_bSXuIlJJ4Mpw&VpHEW$c!4Nl(va{XwcI4wt4~?4797eI=_xyI^2>|yf6iQa;cP}VI zO5G*(6z4O|jSiH{-=YVchOnHuV1c!@Ym_N23e+YJ5_;5;Xt~}Y*Pab398d-v2!Vso zvd{_ASHi^d>G9C>8cjwK8>;6bN#t}Fdaz8s*B)J#Z3S1@r$nMh@Dwf8Rx^r8N0Jr4 zI%3b0L5={%fY-nCJin2bf{XM!9suAQoNOiaB$&Edss!2ihnqL--f2Otg}Prt*OBzR(|{VJsZnG z-7=X)d1$LzuKaZ0sjH1Yy(FXg+`416@%g9lXD+g|{Kr90feH@z_3#|1&jOcJ7y5YfCr7GbFAYT~d;tzy?B-3@2)Nq^m=sP&bcf|0 z{S^vzqqngy!EXTi)p9jlwqwD8Xr4-ipqq0-58q$NW=&2zs-3s`)m5D*s8)7P+c_j# z$O}PcNsk$d{!N45fKrP zmN(i9J~^=^PosrLiwS9u9qI*CgM zu0OiSeqeKFZUYsl4j<+Vw7MP%-YnIFC&Yx945-YFUnpNb-X6h3Tsx(D9A-y#e(hue zO~w6X+xQq(CE&B%DIWa95Ms(}*Z1FToxg%C8C2UX`IsdF2=-|bTAiI7YWzsR%`Ud8 zi~)qYzx)z^4!*Uh>94T0H?UI6^{WKS#cocd63@Sm%X2PDC_%e0&PD_Fh8+2t8_SuQ zb7fm`+-zl^UzgKPbU@mGN<7}|VY%RR<&o%&NzC{{zl`RBE^fYf!X3BA@u>|YCa#O=ETB)S{GGfUu6!>cmw zr_ih#9})$42_9B4#9ho(Zui)Bs&p0JM5l1U)E&v9PJVUQZMho?g_T`e_g^gTupISm zgfXZqYS_Nz4((MtAKPa?x4RN^HnKggKlCXw>UZwUE9TfJPdTlYYVC*Q)dYwM4@;xK z^1O;3T?=tJ7$;4A)jFwJl%s+$=Ykc@e!gnA;(fUxf02pmlk}q3LE->A?^HCMu&hA4 zzuId?nL(#H_y_m2&%=f*ufuMM>P4w-x!QwQ;8Jr8(84G?t4p=43ZF3u>fHFW@LDMq z&#`ZMDSg3kQj3gqQ-YtX-2Moho!M%1wUzq(Vl%%9LKSbOe!V*10B7-@{66TJS{|!% zqOu0D(g&0E5g+5cmcbmf-lP|gxg+vLN|Y3dR-zn#tb81+5!OXof0-f_%kR9)aeuL8 zRISJ4N)Iq&_rVMZLe1J7N+Be9Sc@V(pNzAMGvU9T88cgAG%DH16?T_VW;sG56>Ub{ zxpr)v`HIh+tmJ;W`R)GpA-lrlF@Iu(+To0+PSe@1f*dGj0yw+QJc1OTgeP1K2Jl`=xRP_xg8~x2D^a@p!b{r&!T_ zH_7KNMKQatwPg3cJz%OCGU6^%x{|> z5WSgogs+^=|D!)wzfP|%9dYOw)q}Nd{*&zUFleos`EBx(_Kv-DKUS^Fh`j!l={r0R zWk>zu{_CAY$_x1ziw^0fi!u?C(3sr$o|NtN3nWmY=$U!GKbjmvmBNX^+DmPKh#`53x$<1(e4+ovy4t@{Qfenb<`Mu_}srMsERMz zC_(ELde^9Cikp+Me0U40jWB8mpTie1soQ}YO6Yz+ z;LTJMKXy$+eP^lO(X5^wO@8eG;Ly1kyYDehs5Ozr95zL8h<;|tZ$iwPrc3iniYaPW z3c=A-vp6V3XGd0H^iij-REdt0<(uP*UmB#~nbj@d?Pt|{fO+$s^;8r_(Uy-}6`gZw z)*}cJX=yM%#JEu78bXu)EfP?4wobLgJ{5@tWl?&lx)tVyP-BlbFq%!eiiRK z*7B^{m9qSz3+G*SkOSEUIOp+%l;*3M9|Kxt<1rgTjGok?czKGXx=AGu3uBW`7s-eB zqgtP~Peb33CdTH2N0Tp7;tv?`22=;Mh(gATz z2rYUy(Ov_eK(`l`}^cmt^{dbOg3w25MALcWxxdc}IBIf`ks z99HK+R(#Fh(DDV8gb$BbUU7I#&|0N1YO!SRKQtN%7a}p+XzxBV8iwsiDBlfmj708@ zcqtgOu3F8k+bkNeNN7>Jl`VMlHIjs4DeYj3*_w4i1fY>eWDtx*Av>!B(qDE8WT~PavLiN+d*jh3LwU|HT}(la#ub3O-*r8 zinF2~cs5$CGvHl6w>r6gj9lu2!!8YHdWjLQ#+-}n9BY`_`?}zb_iJRdUyl4VtR^q0 zD1Pa}BLJDFK9$=5$8|0uI)eEbGb{Rj9N%)B=66^TXrUK~*|lm*FKnSX!Zt`fmsuZ& zn!5I1KBs*`Ae~(HyKJdy_62STB#doL31){7D~%O!O38C|QMOD=e2sdRH5s2MJ%}W`^&T1LrJ4>g%x6)$q`&dPYSW}l8k!AkE z83(KHR(*I&jzrX&?xziZtiEj5JBpqUyoQJx3F$%xS=7dank-SY+lg8WJ}k+}Ovtg5 z7D341zV>e(BcW(&_jRjmbwy7MZTUv>W#Ut^oow z8f4-OVWfCqd+bP6Um}J^6Mv$$-HQZ8?dDA~Y303cnbL(s493BMDiP58)7tElSd3Z} zw}yj?*&9x-qnm$BF<~kG$rFY}uJ`w2gkOJ+6O>$QGb1~mztc82TRk&ye%KUXhPM%)c)p%1 z(17aIB*bYuFP+pC4msNbE1hkEHO&i%A0Cjrf{8c^lwJwCW!zklN>7&=s73aTq*k8o z2j$x9MjhG}RxKnoF1pPqt`Y85L)hwog&1B2&|0WPLJn~>#@4^`XZbr|K!Xqtt&hm# z-94)vgWQ+Xn4E&>-De-WNSgp>J6>BeuVIw2H5=ER<4u;v>zY$$z*h_bT?XdEumAe; zOzr5=P5HU^SH}aWD~i0_BHfzg+!nU%F&iE0aI83|Sb|)6ZTZp0dZ|zjD6wqYv0^>p zabq-FqzN3@;63W%&(jZ>E||6Yu$#)hU0Bx#u`i8jt1wx;I^n zcQ#snG|BxA$U*_Id14mQ1J&|bNTk3s(ER6k&koCQP1PoYlWT95pGJYSt=iAE8mFS)Y_9LFNwq&70-zH7IXbmYAfh ztC>fwQILR1dL)KvdV1cyPE@V96r`{DlI#f0M~|LQAxu&@XMNoqf_`36(}#`+w=Gp6 zUL2w^5zo6?{zLZ5R>U)q7n_y7S%10&1oBlN(WGFiWPGAlUr~3iwEoceq-yl;&8&mr z<5rJvz<_eiUF)v9HTOg7RrKzVfCVope3E@;>J@k%ekYe|BIn<^UBN>?0#eS)D?hO@=LTLWGvlK_ zGTfm4-i=Y+*eFip<8rM}O~@>gAxc7k$84ob2R^TlDg-Gjl~mDlah?kfv}tNj@+XH* zxSbSUpPquYrS-vs0l1Wxq7V`w4~=Zc=EqHwYn5dkwHX!JoPlDQ4ZS~id+{pH__m07 
z?m}xVscC3bdbs%**@LdHt}wwI)5QStnly4BZF&C&RKRcru3{B#G@thkYKxG~Cy%J> z7n`cBUX;77oWuz??Z6xf;Yb1ZO-_~?tLdyLp$F+1t_zc8KM|+v%0tIH%~HKFKiEbM zS^3HMo+)V64L-AO$^L1$4KyM|p`>$>tok(^5Rt+K44g$5dThKf%!0=8^Wf>OljiXT z3NCj_kMaxsQf-#+u3u2uyN6y~lkvI*Q*{+F%^F+PinuQ({A~tu{B~9sf9{m0<3`7R zO+4zx2J_Zz{Un`rIhCyR`*OnMJR3buK*}(VR37(5fv}mZ0b3@%=Pr-zb}O5vld?`= z>qxoux!&YjTbRzu9S1BwG!7-XNFu1t|1L%QVV~ap!mu!1fj%c+ldWQf9+n54YSY`U zv~j&8S zpL)+aA4Z-E&Vg0~8~022?S{O~3|zew%Ap^sAWm1a+wz_YMtZgB5PQY4aFG3hrK5z6ALQu8Joe{C=@Sak|j_;Sa0>sMs_Yv zmg51#HZx^ojA>S!TfKVQM3T6D_4<@NJL5xq6mVDLqal5K0C&8Z7E)rS6MANfS}Y9C8I@Ov{7 zJ)FG_7Ptwb=UN>Tv0sOU`bd}80Ts<;-D5aDS&UPa$5mZ%Qhw988F8B(w zZ|w*!_kMl*s%4?>1wt@}fa%J6BBY7>PG>x;(#2f4l4S=VL+cm7#3@{VCP|l`yW)7KSQ|THu1B zlV(kP{Su8$mL(oLnTET=Zyx@ggE?*Z_IR>oMw0F^3Zk-+%S3fF-3S}w1 zgJlK2{FFTTLcIO+AV@X>7EN|!|8P=as+I6U8ZFWRGrWf?4600h1noc1f+z0lGJ2*O z%s-%cZ*@Ohcx~Yk zSayB!oZz3x@ENaWcbRi`PT;Sp{8fKuN!`DZBEw_KK%oWw^fm1`nGt1ujZmk)f~HOmO8|qH1x{1N@mi=SRdCG`o97G{$Bvl z`ehoHEv;H>;mDxz6V8Rq&X})G8;w^^YgD5Le1Xr>xIzkyc{)tM4T+1;s?27Kq>Y`z|VKQ{Nvf~yi6Naa6{6T=3{kW>#c1F2oK`C5j7AG4uE=dmVoLt zYB#-Fj=NP(?r!hR)`2Xv-jlNi`<1N9YO6kM>H=taS7q`|{}H2)ufvytn^PC7G6hTd zZm%g}z95@ibR(#a}n5+0DK(XrAQjm3YzBj-6F-sjwfbLHd}a z0afYmU`;=#z%FhsG}1z(&+1BJ%U^gW+9>Kz0{zT`r3RzJmtWlI-bYt6o6)F-vNfo7 zo@(s%mDtI>oeny|N zP1zcUl=uzjYp;te|LUVybE`?Hztf>fmSa~N{9H(!9zQN3GRjUwkjDIBt$ndI3=*87 zv+)sDouy5%z6h$!iK5vd;cm6{ue1&skG8BN;6f4+xL8{752?wB6Gxh7b_TZsVU{t$vx$;S5mPF)JrL-d{-1-2!?os#*-zxAGnSB^XZS9r#UB z3#m2T`!vwwbJa^)lw^S~GuDIxusa9sKr`cv7-VW+WP0L~eueLyK74TB8SbFQ$ zHW7!@&t!v>vXzfPyuKkO`_>F=Ju$@AJL|R*Q;Q8RdTni5QX|LfVX;}Jj*Ldtp35LJ z5+bK4sJM05eU_)&zjgA>L~Nkw_620=o1pd2N$&=%m<7G(CkBn=N{@yf(gPSv+14u0 z|BkJO{}o%+r6(u^K>YM&*HEw?Kl*a%-VIes%huTNKFNs$d(l%wwS4r_Ua;09xlU_0 zG!ZM5xjI4wMgf%BBbUa0^)J;P6}7^97zqS8HhBzr5hXvfCpEIm^=hYNMQ!tnXf>+P zJf!j5@VwZaO+WqoHNDXz?dnb7W-v9N#-DdB^kJq- zWhvb*=7^mY8c_QD9pleY8AUittmri`B46{Oiq4_>?D^i37JQi@4S5CJXQq{vM9Uu^ zC9K+mqW;R~cAIsFCskk9EuoO2Cf6EwE(0sT_p;s?bKTYys`aCWfyu8>4 zwYJTR+O9royEw&NF{wQcycVo*KRemz9-ev5rJuC>L{T z^Zo_?u!fiu)SQmU;i7O|?^PkX_#*g@)4iWSz1D^>*Y_037k$1C46~|pVmgX=U5|kz zjw#0i9B`IRWRWoPF3rGGGz1)K@Qo8szeTS)In5D6+)Km;j;`VnoevA=HbDK**<;1`{Uj4CJkJpa!^5JZ^sfA`{YK}lz6E+_*5_}R! 
z+gtUr)I);!JN{bR8T0SeHnQw5Prn{$@Iwp0m&?y4Tu%h!P5&;udVml&r*7(nU5>^0 zcEMw0y+jJFrlb`#;xa;?JPZkTr8zpS@rfsQX42mMV6-;0OkH>C(Fr-|HIyUhQrylZNt$*NJ0vtOC%9JA$l7` zv>?H#g9Op*=w*~7Nc0dykKTLlEjmN=P8hxSFv`2+zV7$=p7+UnxvziUwte3pn{AAB zuJc%H9d$qU6*c#^KilrcQAq~@Y9o<=a`O{{bVI>^OQW9;AWpu;O{Hh9n@sT!0(aN( zx#s*@!kqJNyeENnNkT$-VfuleAL50r&{WE0c3kc&$xAV2Q76yMITX7G1(3CYd7oW|ZG3TycPU!Axcd zPGz#uXluq0E}Q|uv0glNbI+ZfChxx5B9q##z{SIKvZ?+GdSW&xA&rGt)F<~M@Rqm%W*ltV+CL?Sg=g#SmmZ!Wh2GSH*f%+lnJ2R;h@ zT4R(wTuD*)JVU|{8}W@d1LUjnS20g)R%_H-ZgHO@)AB*9>p3#5PzJI-!2MBYHaPI3 zn8dMgq<9l|XJkJq0i$ni5jmnR==k1PK+*xIkd@5SrzkHxfI6m>h~jKVU{@g)sJqn4 zV7w}PMR#lsheWeU4;F8jjph3WPzz@x*PH2ivq*xuKaLg~8|QtyePXr}*KWIg zr*_->wE&Ywh?i}V_rTf?DQn-nncG8xxj7^;1hff#YtU&^w4Ph9mWx^9us;RAPy`)rp#wwf$Bz{4eww5BRYeO=^KWOQzrA;` zJ|c03o-!C+)-%@UTLb=9#C!2fSM@h=M`@X#MWH%_`7}=p1FW3IeM#bTCMt+<^YtP) zt+~v`e?^T$kF=AGiG8YQq`a?FDF^O6?~a{FZ=^UluwO%vG;~S3$Wp$bK|vl{;)uUN`BYl4emoLD#c$M(${ipm@8&caAj$fjy0cGkcp2df6rM zCw*Gd501^R65rrhX=BeZ!#OG{Ubhznflt|K??iao*zv@+!rVwn)|WuIL5Jv>PsF%` zqn6p>MxXxo!iu&8#Kt*ze4fmV-|@XxY=PLMsAKL^hA3}F-?z?Zl{SG~lr<@_v;J#z zF$BM!i)^X_Q!QjuAf%~{oF)H3iR%4Z}@r9H%;=d zu=RCL90|(+z7pw(r-cddZ)?B&dGK!()`zyogjU6|gfLRDlLqZ{&xn84Wujl{{!Ybh z*uUZBCf8bm|IvLa;^*AeBS$u_R}p5cJW<@l?Gb9n)e?gbEAAeV=sYw{&7C{dILB~F z{&x)5Vr|UNAc~}dSC!Z(g%R(5SS;wlOW(?$8@->;Y>s+w=;Qh;2>x+S0YH;dJ>}A4 zKttpV?P#LJR(nAjoU7zZ&QI$fI$ot|1~T8~TYt)r9J{b+ZjUcG@(!R06CR0tI6D50 zfX}y@Z;KT!)Of_OK7%0Tf(!vc%>QIHN>V(BlRnuvvaRi@awNzKp0E2MLF1paI%6%N zcpj-ngvvRapT6V$!WQ>4T-VW2PAf%`MC;*~Z~{&!u>0q|8ynU9owSde*NRMr!j5(k zGG@WT+%cOY-}OWoiyI34_j7Aguu;D{$_qeIW=O^i5-|4lcR8mrmitZrzfZZT3&S zb?>`D&Qr(o{EBCcLq9;|=?0R@MLLJllP~gjr`>hE_h-U=kESjQ{jSN_y=&R^`0DXE zxuISmRudVSY_4eBGHM+Q@bCGje>G3fiAq#`O&P{4)SedAm3)TmF&k#U5{ zdywl|wme(tRcSlctv1DF9<;VPT^Q;`x+R4`B?a!HUq^UtOu}bjvddQfE0%`ldt+>5 z*t>dEYX$K@c~3^1lF!|YXH#KglO0d->h%lHmrdUoU2!Pyw+>Nfh{uq@;5D~i9^`mx z(^UT@QU!^aN)2Q0h%QMu54s@%xyAEj{%ZDqrw1KYYuPW%-6m^p`kTIbegz(xHI0GW zrvV9QtPZ$ZwB2bX@ID&gx))1htFg@X&ASJ;G$_ami#1g;*!~SPN>e84GSqye_NePi z;kNy|IZ~r8tHu!!s;Iep)!cs=&93-e`@u&s6F3R4H3o0Z@n6u!7ll>b9>?xh-b?j~ z_a`~k9KtFab*`w+(#BfNeQl9Xo9Pd+`KeX4#Sl0a4*DT*L1pmsZA|!cTDwVc2om|t z5Sb48Sb0NfxV`_>_Gr!)6b7uz3{j7KrfUL=4zJ{FgGlDfC(d`e2M1~p{M!I4n4^<- z)<&;8FsI6Uy9k9Azr5hs;=udkRibbbpRF-{W(%m?KegU7pQsR5tc}PiVxsu@n5O$) z3QOR3MoJSmEeVssELoDY*QPvS>pqB4W~pgdPm95t7WPY zBJzT?T&IT;*rCd2FW%^I@y{ycxbQAdRx^c?=BBjoz(8BVwh(2o#^J2|va>ezLIjCp zN(0DWk*T)s1USk1x{rH~5vKoYy|zqrP7_e|A&ps(bp2Jhu&hye>${EI)XW20nTYH~ z?I@nr6c&L>65LK#_DD1TmCci*0XC5niZ4A{38oa@(PgaNj3uD2@O{NvCUMf1j@khG z-P6l0#r%OcgALo~`=07Lf3455EkHib*IRUE3s(rY2yH3OrCg3I+e)hUvv!F!c<-ET zEHXpZ%kHZ-*BNSbZ*#A_OpD)Sa_U}?0b{>G&NTzd+O6puo>w(9CnF6!X7kk*=T`+J zu$r#A7EqEM_}ql_1HU|cFx2+Yc4=K^w=1mA=JELB*183U^2wiFGEvcOa_C7l!NmtN z2Ol|`A`Oa;YMEwET-Zm@L1VcT+h9q3f|yf!gk7_`;<=8OB8I!x7e_s-kto*xHR zyHUtV4=(3YgOy)YRNznu2c-S$P$X_N7G@!a;_*0{!zT0q-_zTLjpn|n4z`}9v7-@_ z;uI`n>>ovgI1Gh9OsxbcsMsoxL{6%dGK1paZBIa|7>T9X@(l;V4E2u&YT}J5+=72+44;;$-plAHX#~6D+m&NfY z(mDe$@yN}!bp5G!&V7s^$rd8_PSA1t12FCyrRKM~FROrNS^VqQ*}s008MJL0`u_b{ z#oaK^J!EtQd;$5EtFtB3XuJlY@uBA)Lt$En2)l)aML79QVeiF7@7s6oKt>AQa*UNO zE*j1`3i|lc3BL-ABV`LT$!&-iEt!EngQHqS4tEx0_Rtt^Q+r_4AIalNNiCHj$hDu3FX2HTHvkJlbavn%sI?5* zy`%v)y>18JlE)m-2KC;*zp(z3hbzoO)Y-3zSbxCU#&FefDty06&MSygaImN%P|&O= z%o-5eG>>YPfe}UnQiq&=lLj3R+~Q`|0&UwmCn_&L< z!~(dz)O}}Ni8h(R3w#6=oLL7?p98j1hZ=XgB~5Cx_r2Yv<9T+2HB>ZDUDxdtnAR72 zRE^OFfh+dA1j(0m)Ow2;m+GHpGYg&w0phM-w~(bKOyU9o_8qUh^pK)MCbg^mHQJRV zB2rRPa%))T>tvXKNku2ZgQ)w?RnYSCUm)@S`gQu8U@f{KAWF1O7Z5!G6rAp4=Q+(( z&^k5>G-ijwDsh*QEaY~!R7Ut-j 
z4aZLS&3WHF#3(m1Nsa_uPuUvwi$4-2{+fo8^my!(JARmda%dc>}tqs)#2<=QB5F3<92?`H^AIn$0=hy55df)U0c5Av9W7pRzmJqV?)gR*A-pAK#SbJh zzW#Y+@=dIG&gyB*m)+g;HB$bc__7&D zh1QG2f2K+sJC~bX;8iLwcbW5F6#}the&+@}Yyce^4A8gSDd;$7Gh+|DXU5mS(RNN` z2=B<=F32bpV=5c(v}%^e;be(#zVV&*pL}=_cObn+^j`#~ z)3Ec?yb@TMY5{)f{kcQv>mP6R^G(t5JZJbF#Dc(~(Tkri@%L;4-#)r^^Ckl$qtE${ zX&{WTWWf*?)_!6VE$rxQ0M8DH=l=7P&Dp><9TOJ43h3#;!9k`Um+}5f817&mQI{~0 zx4@^4`*O7$xO2)=0pF-T+b=$Q@pMAkVRbWN&LO4j^+u->l8v9UGa^ch5Z@~5b@Ue2 z>^IndQCI@14m}tSdI?h3(INBUXBC6VATw%q_Wf%ZpTqo}dw&wWWf(Xk$lw;l-Dxtg z(vHp6-yi)xW_9PA%X-SdJGQbGd}aOY>J!XAslmU{vy!;y`{cd1-(Ar<5AZ855vlB7 zRwj#RliD`uW}HBA!ov)(x%O|jetClRz(s2yVmw`RRcu9mFJ>zOiottp3 zYapEzRrdE!c}?T7_|?J3S3XZ(*ANF#C#zrzsZ}vlYKPfy>R%=K*951HF|h`}M6Fz^=O#YJF8dLH%7mr4t%=mMTRQVP~T)2IIR zlwo;5t2@$tlSQOX5$CaWbgGb=AHi>t3y6TfI5sGLzXMa7b~FeG(G`WrNl*XZC^UZ` zz3FEjAHLsrx*aDO*kq#6dy9S_yA<%F0}|N|Cc3vsl(<@<-aU&gpMP^nxv$`_)=-Mc zTpv%ST1Z@KsCGDKnziNkjTNbD!eM{g32DMCeAzfNk}gpU1J1a1hLfK=EaR8uUpw#L z`}FUY{@>y}_q%hB^m^F-<}Cfs^vSuUlMDs_nm*Im^k?<*H=j;tfCE{+93oQtX9M~- zpVC6lkM%`F&wm;K&_w}togshu5o#%VFXGxQ;2N$ zU%ThO4ST`zC;+zF|F13T^x5wV3I@oVx7d>{rO(n zM|`|{;uXEm7<_04@U@6;nBKneg;g@-rs%aRcdytZS7YAT6;_UzmUx{B5UzgVFhCyb zONnmJ^W)LFU&8#)en8UjiGm0Q=o`z3AeS((|I;7v2Y`TG#UQ zR1syo*MGlFUYfJA12=Vo9!9r(D2zjl7Yy@%rV|xFs_{l9sWl{LF!sCpKo^MYpl3dy2BI{ zg2NmuRe6!B+Bc@f=0S#-F*My+WjS*b?mHoZk094v0=|yr3dApmEP!(O9LWj zn=qI?_PhnTPuAif{!iovp?!%NP|stlX~KgX7qXuBiwLm3TF-0Fe_g0@HehW85c~ee z8-)l$OOJ_cupHc>3e+g23l0hCa*yU~Q(NpB5n91fmH1~v1z`@-Mq6pNAZ{=)FsPq) zvC^fM+Lm-LKC9NZA0G{~{Rh(m53VA%yjsJ%Kf78sO8Pl6(l}){+H*2`&3ZN3?4J#U z$uiA&$H0#cG(TDEL*&+!Jjzx@twyN0`4))(iKs4NXyfq^G^(Kon3|2Cvsp6F)YT)Y zgRF_2tc#~cudMvDGlQ*O2Q@?vTYz+%hvUtUJ4yaOXmbDA5Gdt3yz26mmVDXoAV;*8 zqFNmLf(V&W|BrSkB<%{1*G$%|ucW*@Hx@kh@{E9)K{e4od>aR*X;wUgwZ2g#fNnp% zcga9<)92sX;XhG#MAtC~Uq?UDO2y!v{k9j{6AgX;kG6})>nmVscqwF_J%9c@AXU|N z@CN!n)t3MI9$tw%r+@)zP~r8?J|C-vK92i)u!eKB2Ddj=b$370)7Lj{nt{ji3s~`q z_aZ}BYm=rB<1P?vodmjM!+1=LauQOJ3zUaP)!iV6)iN#$Sfne*kNm?2?CjD-4v@*R zasf+Hae|gPkIHEhqSi4?GP2RE$n=GxZ&n4aiHF6;@CP@24tG=RDf#JB7232N->LJs zCBedic7mca7*lakS)^O{neF)wDMM$o472E_4KNtn4#~c*Utm^R#W0`G@et#GWFeA& z%!(byf>BQ7^62Un3{uSW^mO=gztKq=8&g|)yOl&b<$~zM;d-$8gK}wc-DsMkRoiG| zP8kTYchGzQ4;eEwe_z0?8uf_K=HrjOBx%FgvY)`wy4Ap*%?6 zD|0a0E>{}h1MJ*=sCgF*Ws@BV>wPlTGN|ytN9M%~o1C1;=r9J2z?AunK$$&Mhs=EV zudgmgYy6jg!Xg|fgE;uUIYr;CXTtO#{^kRFU0qIof%W?N=(SQH6A>vJS;S8V-0qg&kCD{l6Y>IsUZUUKJz?>L?5hqv)8$ln9_4*vJzt?=7$B+rvfT zZ}Dv4(G~TPhj*)aFkr$b)vQS{MQ+g2lHRssy+GGNhVg(K7bqbPVHJqImH4dgRhs1bIY>&voV$^`)zr@PNc^KH7!h8)HCqxM5(zPr2vuFR)-A3ISRB zrh^kUs$rJ*i8MAP4oHdkU>iia!z{AUu36CLaxnOUV z6@gdtY}uhd7vQH?_og2KANkdNo-u6HyV>>aj7s~B&*qxiJ%K~%iW-wtrvV)$ln`2e zFz=BoR+N9Qygg(zKuqQ!;D!nRxfOlq0cHRuV0;vRS11c%tVI%t5cTmM8smCk0Qag3R8&!1ZFi>jA+utWZ-= zHfI;4u8rFTdtbj9)FDO~qEViN4TRD>`;eBDyt|$f2QWjwBc=g|V+Z4&<^}T%^8PK* z#s~jTfokdXZf73_$g!}M3QsSvV8UH5BaIHQQE}wODtUT$AC395{OXQ~`G{i0<5lG2zX-X{UoRXQmuRT6FbO;a5JL0=4&D7jH#m zb;=EbjFtTy$!YRo5Ak7addC=oOvC0j@?^Q19V-)(@nE;5Z|R|i`^a#K5CIJ=Y;MU<^&RT~RjAP0um0 z{TL7s7F+Fvp?{HV%r*>_|~4ieKBnarZNBvRq7^ob?^!Zr#~t*#gU-H>J8;gFwvQ zCL>tEOQoYfk3+^|`Ee3ygw=s8E8|%jDouSWzo}Atvd`~mGW`-k8(8Z6tiooA=vYMa z9$l*m1~0K+q1Vi~;B(?2#W%brF9X0DKM|(ZI9g*T6Lc~S$muDVla<5LbKXCu!z=ChVG!l$2CfJr7mUW?-us*7PA^nJJ1%Yz)_Cu^UB3 z#Z0lFbN$FU)rE@{l6}~>PPQg(LRPFt1@}Kh^H?ctq{_!>_0BX?z$XmS?1AF$k0YAB zAk?NU!>qa9N>9z!;)}5)@$0{_e zh5{>hjvcey&O8cSnxs!|g8re<^0=pypT+~fhit{z+m;WC2^Vn+=eyh}K;~8G9-T4Gg<7*0;MKI~tJq`dT|3OV zNk(R5ACQVsPC&}qy1PWvQB_VVtU6|k(o1~y>8AQU2lsuCBzRb2z;$d?0g>k%xH9I= z;qaZOe``Z70UNui0%5JZ1T(kj>HDT>o_mXN0iE#<#)FFzI@<<`htQj=qql+xdNoF#+~KlWoHmtED3iQV$siL*Bg`6DYGPqOIV)cjvXX1U 
z>gjtstoCe@g~Dy>QAe?{#ivgtMM=ivnL{#g#SV&rjB*Fiop6+Cp*LQ3W!}*^*{IKC z_Y|PFvsJ!~)tFT>tLF7&MW&3V1&yO={K7GD@Y7_it!zLUzW2i(K1O+-tIJjD3zTRr z0np+6`SPrw^7$PQ6g|AhGlqt3WME*a>h`7A(Y7gLYfq^jFmt<1R8XpFVxETC8{FpraFWMVnv*63*e!EHSN`k3?$Pej0~Yft5#iwAV8%j&uCUIt z+TyWNH_@qDKJuPVE)qN+Fvy|{ zYGo8sX*eha1qIYt@>!@R|5+FoE}=Bg^bBeRAD&jvGa5}EtGC4o4^FJLK4f9r916=W zK#a-px}ZLOVTDR(C}%R;PdHZIW>NpXKJG~n-m1k?S2CBIz3pumtFAuTFDf0z7@s?3 ziLf>r&a0`3ozMooZ!GOvtJ#Mv{9E49 z1F;gLpa`Mmla`Qr`fdFaV-kTnTV9>(hrGZ)}GaxJ_a3+8z}NA(sbC zFXIr9*qEA^X5+)MH7iAc6TmQ51e3I}`SOKHMLCk&gi%Fv;Mu)<_f|(2^y#8gRiUA? zN|HHJ12?zp`?>^m^1P;DG1I=%HaSOj#c}TAr5$>^i}@*m+={;HSn!NQ(BUm@nuR9Z zpU0t6E+A-4w;XOH6ijh@tdy#iiukrMwDHzktH6AHqEfy>z$A)LgKBLXW>CJ8!{6oQ zakaI2+gN*PONVWTx>LlEX|5xj)V4}Gw0&@5-YIzBL)-J@IwjBR^}?LAbY17Feogd; zvCaw&ZF`_{cYN$udqzyp>XSaP>->^j!}@vMI?i^kiJHwYPRZ0-TjRich7b6HE{uLx z{$3e+^xOb;1vwQuaqb7x9nJ6^$inElGUz2Y28*kl9&fNtq{J<(9mc|*g2oR566ViW zZqY}D4pX+|V#?j34+!%iZs(^CE-HFTCa9l~))SREhgchT+FOqF1~h2qmuEX;!a`#O zwS&+bm16}hd&}FzOmod7+>$jeq*yOwu~A1=&-R*YJJ=U z1&s*31a94HrUa-izs+KhqghKRxQ)Uvj!5FI6m=$VEqZsIsr^x;otf!~ zm-fT~97iiZ5-YW2k;B&qR50Q3M|yWiFYqwp?^>qgq6Sy_9x~FVhG}J+%=6q&ZLn$# zHy`TO20koxQ6IO~=xL009xZ3=ORRkp-^7IZ%Sk?AKJ#V6u=w4^eUZ5B7k7!Dq*zO4C5 z0&tWp*rv6E7gx=Hr}iV~O{3GO?ST*Du`V+qpe|AC$~irI|G_KfhyH#RVKRKqr#YpJ zAfF6_cR{ci z7CsrQhMK3x*1k5&$5rJrIGd4N)zUf%x+rgGW>-&Yr0YQqdi3iWyimt4{XBWI^c+*L zDdR>^htysfY~ov-{6pG>_rS?C9!ysxt@JT`+y3SAm_;oV?QP?))Fb5JSfNK6q>&!l zPswFkHZt+fbEhCCxZhB~00}$%l}L7^f5m%A6N@;AqZ!`CsnQ{MFv-&7OWG#`my3N0 zMb||dQif?&YX?V}SNs^4lkX1Vo}zrTQhYO^xmSdVX*BPRSaoMA{wd)8tj`IIC=@D) zx<1h79{t1ZV7J3AZyNH+4gM#wc0@u=oz5J6LF!`BOP+IUnR zGJ>?Ju^%{5VIe`$r3;QbyJB}~?RO0m1P0fQIG*+A&nm*Fs?CasU~f*slx)lFC&}I_ z@00W7)d~@3Sk<-^nKn%v2qal#ydI*>mR1qnnrVJxR#MZ%uJip40{YHWUrSefo6_&WJL)9pp{OFq4sM-cjT7wt4ks-@g=HX)2BSQsCKKDpn^)d*hc!fk|Ug z&3l>hq9k-c=>!>RMo`6Do!5vY?x+p_IocoJyxkP;+ev5$Y+13XW6196K;s>+Q>Wd< zSB?cI9P>3LW*bB8c0xwx`~isRZ`{dNv(M+PiWBBPy3Q}j>A%beJ+Dr9-G*O6+{eqd zbPHVAi$^nsprLhWnzHUiq5Ik(5=ZI2d3ix3{=p9<`o{;L zz#dGop2%fDvl+pSR!F!8ps)z$U$4@hn#!4I+=hh*=tt$&sy%#vd1RY}akxLB$#(vU z?%U4cPYREPv?7fT@u6zS3T!6Y)B-&F@k4jpN32hpMnC+U3t$K;X}-Jo#x9;Y+@A$E z3*7W(QKfa6ko+|-V=!=B35(1OlqD%PorH}hj`SkB7_9l*MR&p&Kih092^MI~i z`R2JD>$GiE={nH75XbiAb7J!n8fy6p=Z=4q=G1eFy~r{5=)F@VPkv5f3(d^Mr}Tn?An9=>CS^i*ox3vRop+zj4952P!Ov34#RL6FchM&g_l z2Wa1ThPZ4K-*WYhBd4@eka<>$yE-fS_ca;Xr`VXpK}-`SpC+!+P@5ABqikw_e6ouD z5tuK`ojcg00dg`4DKBhG(iqeR`m>DNq?&jQ5RMgWpv@%K4+`b4jBmU$_l-uCs! zMr10BcD};VLFro(^-^O7tjkq8)H`|)RDtoebTSxz#A`YIO{i0X+7&`5pKckJkx>dr z>Z=Q4A|sVpGWaHk^Xcw0GD=VFqu5L!NVQ+Gd+{kHP{r|#w?7(q6%wAkt1&<6Nb^d~ z$n1*b9S}gVe%H^YEp_-|ASY(PNVu~}x3+gmjX;T1S|(N2bYU1Wm0xRBn-WH7yGo4~ zl{b!7m&eA&vX)L+)oN|6bFpam)}D3D=i|~iHALqqA3maYWBW-SR9SF#FGhc@&e=u! 
zsnMZwbuGv=9vIb**a8f>HwG^jdb<97LW=0g(t@_$5K^<*ymBZ?ee#i@^ZsL|GBR9{ zg)fhKII!3_urj2LNR$^drqnKSs5!*o4Z`46!Dv(~6#6_eN>j|^H8v`=nWsj!9>8xN zKHnANcDLZLTe$dUL`iuH71OFCd62bo4DvA=6~a3TTPXCCwX$W?e@~kWGPm1U)7uvv z=Q41d(YRkUC0XzMImT5gm1rz~YqhK_Ws$vezbh*A*mZKEwZtGtWffMkg{RKz+fXdW!rwbty@Eq)XG z`HD!PgOsom$f&0Md$^#A?=ngW zixV-Ic3`S#t1kc2gO!tIk)?F}K4U-1^vaxjG~KdnlZ>Z?PYwkqZ!D1TNc2Gs@&cv%musm~Tge)4kRP+{gieFwiDdBwgtyT#$I zta}D)nQDb5s1yo|9{mqhS~D{Ets>Ku%6WS??J#l)xc=r0*Wf#vLv<&;m-U7>Cn zQxMkQTJ$svZy#L$+LmYUC8ML0aOxN1*#yia2ruO>SDmIDQiCuksXy&iy~$irf)^Ffd(#Vq4?E2Y&Lcn!gjY&;Wy?RqkE-Q6+cGmIKCU3H^@a4BBk5vAJ z-U0~}E4f*_HR@0=e{%BFau@#rp*m>ps|kU1BZ&F;R{J@S>WWxUQ>pniBNM^%6U#ch zSTv@bA?*uC%4vaMw zol5Oacl5Ni#`1Gt;sY4gSluY3L>um6?Rja5;W!HNQ&{Gpy{Hu)*?|D++n9t0oR}xZ1P+4(o&x6Nj;EFO+dwmludtPZUCeed`(Z&r0w(V4@|%fv~O|BPtb&| zz7yK)q1ukcx#wBSaXT1j70cXR9%&gD)bD&$Nu**2vNAK@M8r_3oWc=^7g1bl(%}qd z<{GIAEPzPWFYdA2q_aiQ#O>ThiK~6@t(&FdDT5SsP0!*|yV{+*?UtC$M#qh=PRy#v zW5K(H#-v8a5#fEVEdgX~Q!YE%t}jNAk%Emzl`lu6fb$TGJ$nI^j`k_2&y~BeW*Cnz zYkqP9p}U`ADC%AQ2T{m+;}QpmvzCt5YWowT1UBoc)gCD=E|!dK+=3=ERRV`h&vw#> zSomY)b0wDZR65R%a6Z@P*C0-0eddWZ0Inm;d=T$oRcS>=kulL2STw{P#Z|5T{Hog+ z&zCh5kgr#&%Tioe*mGVzg3&v!=6e^6=Q8^B&#&Wl`It>&qf*?S$~=@meYv3leWeS~(7*hk>r|*j!NMtKBQcVk0Tu9&AXXPDLKf z6=lZ;c)P<`!QW}N-kZ^U{_^Ga#PQKeZb&$K-!4)JdTr9pEtpyD*z?du=+GwRHY;p+ zeE&Y}0#BELEMVcpu&82X%u5afjG!`clZdzIeX1in4@{iSt#qor<)M&xF|##18#w1uWGtt3pWNs;*2H?7R{_tFu6vz9z zBU1;}dcVGy=~PWf`IIK`d&@R+YfB_sLfsd!{vQ;c({Tfiv z6s0@u0B8-KqQ7pTa{fzAF=&`IjLUTR(M0S<`bN!04VOAb)_Q;}yow4?$ZN|L8Y=~w zC=`F74(fQK$0`pB{~hkp+cbQ7U%9%s0_PK7)~AEZk~+7%zOw?6mJ9vR=DkUb*62`NppU3XvmbsF+3K?q)b- zbfIra$m}gc*H}ySyV!awyEEr&)HqU&JQ1uUyH5zqRIe1>;p~lAy{=S&iz3X!g_j6&y7m^7Ivft+?w22HnC=+*r8#;bGjxk1L*P zxA;XovKB(arb^_7D&TX0+wGYai)ac?<8Rft-ec9Z{T_z|Bpn}b=s&0iCvEH zhDJ2ybPHOjB+F7|X(d(&(O4<;T8$K2hmTCEl5v@S?5|G9$xp9!A`g<0lM4$+?-ml6 zl2N5g!x@7XOdSvBCgEbfD}|KstF3R1ruAAre9|q(Hw%rS%%9quC^&0RdzH8s!`OOo zUgDuRc3Uazo^O-r6CNy$_QX|TKGX6dq%n%kT;EyF%c5Y$d2kNYtas>1ad6o_7%daZ)p5`=hq^#Z_zM-UO762K(<(jVXb z`O|E7AbOn}$&KVVda}PTpyGy&GOyZlN>A=mT`&hJzM>*?MB${~%HZ1e3-5(pTjJLy zWBWozy=C!Q@g^&SX6j{zpUAWn{3)^Ej73QeGRChKgpYf$dh^~O%D zsYR{p@lS7>9-l>BvH@p`P1kekiSzrspfX7 z>WrzZTUD2(8~t`loB*y+9m)w$?g)!&8HYx2Q1E7bu!hJ@w70&RMeTI6k@!`}GBHDQ zT+|K^ro?X3bDqU{s15=)~hnJc`p;knt*cW=;{aP+tje7 zLui!x-b!vbd8%=t7;=C^c>X$@N`oM+!L?GyTfrS`hgt;&yr2u36oc2`XSOi+83R@k zzHYNprdkpUzmnvtKeu)`$Rw!lk1B(}B7G3l>YX-MTcDYRyR*hExFHYuPQFjeLd?g1 zJ(1g}-x{sKZ%ub4Wq~n=JRG^X6fl%;ey}-N(jXqqklJgwnd(uDMYQiG{=5rQ0+XZUttX#Y!sxm-$qFp9sJ_Zx9r+V87rw)AxvY@ zIu3PhRfCv^aP2J>Y8J>D9W}OFtYx&NKh{9g^Yd%n>nOoj&G?pVYn8QYAgHhDwpJWI zw?D>uc_*s{UO~MMysFy4ps2`uP@K7b6Fc8M0gDFPk-fN(*>D$#`QbJ(C=aDYf0asn ze(i0~be8qQ%nWm1QgAh5chttyM=FXkT0<38VFE{sjS5MUH&TZ>r2yF9@@E6i;cTER zxC}jfyDu`XU{@{IRZyx|=#Y}f5+a<^pt<2s&5*C|Gg{#=JI!cnu%?o&fdvNztZ=Ci zvrm>ScE(2|3?XI%bNyw2muYqQ*_3i9ii8=4Dq@U_)Y>>zh$n4uLRFRQzv`e|blz5XLEuCTnn5cU|`4rCBKyz7WAN-V$e zfZ(89WFa`b8_!4$p_5*Ld0m$lqzFM)*Ep9QpTnT*2Js?$h6mZnxYG)Y(?Ds?gjO$8 z8!xa?{KUa%R9`u}^S*%^l1(KiYaDD`=jC+zRm@k<&z7M$0}o}7UdG|~mAS4cvlPF3 zo~s%)XIPCu%N2mKsFP>mTN%M)=|1*@z(Z{BJrxd&7}(Eh5l}n0Ps{yl z&A!WWrhTLKb6Q#&v!?yxFcc89KLkvmA%Q}XdempLfnB|G<)nLSD%wjd_TyHWj;o&Q z)q5b1a8jen-VHpKSL@tGc`?>QF{M(%W=F4D=6-G6QunO8VmGj>6qOq50&`1QMl7&` zWVHEIjbew7-{aIJC^}kJB{PFe4sGd`rF9ZwtW%n{Y%MRal?JnYt6O=}?1eugduAAR z46aviX5E2$azOKP^t*fui6e2VVQm2FV7sznM=!l#lqrit_bAow&Kur==O?c^l8DE4 zRrXZ!8m)}4f^8>v_m?iO?eZ7Ofp%wVVJlFQW@cc#Lqbd(v6c)_Mum1?V`J}1Nwqxd z>YxI+a^P$KkcoKs@L_vlOl4$zb(Dn>16#Eoy3RmQczkf6zqloUs!y>Q@Tl}g%SZwr z6z)O+^5@GD>2sU$jp|bUAvdFhl@DyLlFa|o!4UFrhq>36Q(jQ(dh-xKlLV;0<}$V( 
zE}KJN%AI7L)Ooy5!0%K_SCAt7>Nx7=G-W8GlBDt9mUghXh@T&VEAD_DYC%;Z1rrAgF z4S|fR%0-5+)&h4hm6oIXQsqsC__F5of*-I7$w`}R;MlKa554M}%{Z98!ZMWD5V0&@ z<8qiK>rxO43V4^j@Y~4l^61884!y){Cex*(HIV$!YGMw+80J3@u2BFfrt60+RM_1B zR7<{#aYa;2tz;PGmIrrfN!R6rw&?yI=r^s6;f8~n7an^NIkYKnXt;;eg`c5IjJOC1 zEoQa95iJS!W5Vw?G(0B4bM&Yp2~wN}ImNV_2Bs9Ds7NKLAdqP!Og9jNuqIJ^FDg3Q ztsK1*VkCYIj1m7`u7)aZN_%2d!-+Xa=%a7x9bXVY|57=J)}bHfeCDhSkop(2jwQ|; zrNm?YtHA7t)9Ek7BiqZ5yK)fLXVln@5L$0c$0xMmV()?s!AJM08=FLaWxgA4V#=HU zjb;6CkzYwvriAT66{KjM?96fnZz@?qQ0@(=W3_7d_|i^W1JH{Q{a%I`K&0g#Zrlg( z;X>O14v-dQU%kNCT=J-5oep%5W_Pw<;v%cDsnq_ZWLtrw8F`qz@pA3UU%51Q=W|#M z0@D_J@GVKO3OH(n@h-#6ORB+sp!&9&Z;(rUlZM8^+Y<=Sb1a5=!Z)h;zAECi8Vh{IoC^{a;MeP~1 zroYj)iFE(}bpC+$_n1VlX3u)Ollm{(N{pUJ`lgf!^Gfg45C@85A>~<(MCLH!=iAB3leCh;}5zB`11u;5NnqSn$-4 zZmIV29o`B&{~Hk6156y9#N?MQrtAC(xEE}6qSivtk`fz?Lf!rp(8YG+#%ITUp~jPD zaEi_qVt4`szOA-4Yf|w2U+qi_KCd@EJ>`vzj?ENe9SD(WmXJzLc*0w8LWzq)nn>-; zrYgW~ZjjTtL)LM8h*@cWsN+ZcqmG^QdkfMGzt8|Rct-XZ6B~XnC^C{URW4>wo&lOr z28f{+P`k`#A${y`SAO&}qenhh*UdfHk#TuZvr3_^4*FrsLUEj6zOzWE%lFa5tP!Ty`jXOARq-MasB zzq|Lm(;fp-NLNE*nVX*G{^uf%7CD4lq8y^!BKVo=Q1ca&Xvh<$u-(5zX~*-_!V9iPp(HMkrZyU{2PZf?9LS}l>jS4bkPQ%I6xL+;=F+W*t{4Jocy>zWo!E0_Xc+gz=f#u z3IYhp>%u^pccS;?@RGQHj)d#1`{T#OBa#5k@<)+|gM(U3%-iwA?q|$Z?REb`J^)<( z!o@<@l=ws=!{sbfJqc?fQjOtvL=@^w{mjCN&$NPr%{Dg_*yW&r#I&l!F{zd589&7k zi3dYPw!`I6n_1m|0*JNG{x-ZVXUM#m&#xwETde`&O6Z@MP-OrAFfsp4r&b+kN*u6= zVQ&5R0mz@9(wYH&;S~w_e?<-rg#%9Mgg*6OiBx|)Z8$&&muMzi{VN6W|4ceQ8vt<5 zGg{rIe{sTpZqyf6fG=bPtA79A_i_IG^oI}-89>8}EdS+r{hyC_F6_r|%2Bnt*}nxR zf9&P2B%t%E{$&)Y+U=}0sgIM-wdpJ z_!fHNU!s#g|I8Ubz-2P{s09T0k~Ol3b)es{y$NSo7!K( z_ax$@$Efh$>ua`?o0QbISB1s!)uk-@olNV`$A;i(wlM3p{S5l=vD&yq@*a_QK`%UL@(CDMvc-3)= z0&Ei3gH(dV&G;X?CEw1KU+@<*xtyi2cKGdVdnE3*&@rz5ZkbQGbpr{{6cIcLXW~=@*pO zbhb}P=$Wz|NMROW4jv`Nun0M>FjW)*G``VNJJqHTCkF?uvDKVZkRbi)My_UywVAPm@_d0u^dpW+hBy0nIFiU%R5(9tfX|-FtNZpR3g+*79Em zh3ti!Phk5t24m+nKNvdFsl5A_0#PwwKNZA6TSQ`_i!edHA@f^<$!X@DR%D z^El$OC^kg&foat+r!S^r@uW9S1pvMC}2ZDIDL>1Fi!9p9hJsQveneAo*r}_ z6$pdYjWg7Awg{f%gkn>7Y(d{R%FI!0Q`{K&99m{9HaX4OFf~?!?je&d6gD6As~72} zUF=gh4!#N~4sZxkZ_G{+wkWE+Fd>27u(6WDCtt!1+h2!|OIn*Q=KCJCZ`?sTM#cPb zxp))0Y41z$f)|&CPkaU~%#o*N+WKmuERWk+8<$&?42x!Su^<)loI6`mjnZs+A|f00 zNR7fb;>q*OqOQs-U4cDVlNDL7?~~@qZs__-alJ<<-n z%6eQcF~(CRjH{_5{rZ)J*IcPhx2%fB;~DsB&~=jX%4e(z^+%c3=VC&wymENgoP>ak zK{-$NKmn+)sS|{a)Ki%R4bn8u!%a$^ah;twukDQQHCo0uEj1Q)LNnv=xNfb!g#s^& zX&z8pSB52>3l+a*La$4pIhI&7UKMvZSU@2?Xk^N8zaeFtS##Z`nup@pdNu)$LS|!h zHk328;uZ*-gOjRiI)+Ts)J8MeYum5OCK*@ipiBzN%Sj>Wvj&M~J13TE^PlvcJgg); zEwDG+GqDMt*fqq{Dfj-gzE3=6N}j|8oab_+dxop}kIPn{Za&@hVI^Dn9-KQs@hu(b zj!D`~G*P&TMQ6$s=Md}j!yJ0JBMQmZl?anU-(85P@ZR*^F%pd{R+sci!NFd5;fxT% zfx0whCvKLAa!GXnrHjdlW-Z@R#h!Uu6qd!ky>}%JQzWXGc6?SdIO@D%wollr zq=1b&G!FGE=88f(yQoMMz2^~9um8;P!gBxm=b?uF`S(zYeQa>Yrt=KKvO>Ym{I4EI zo`m{W+ALM&a~V5~2oZrJ^;ae-+ZM1-=Hv1YG2)v|32drHnr~S@)r9{P<0X7L8k{yj zc=wgBe2H)DX^S(D2>!{O5R@O{Jkv6G6u0rSlTIg=Go#f30G}pC(WqT{YGP&l=)sm0 zu9JNq1M|EYyG@-nd2l~Ob(nP-bqYzZJRLcr$48R!RS#n*dw4*+Ry!9>+l7DKe$1OuAFxyD)HS^vtYg; z=O6o$u_s0R1|RW))smlJC{g`P_nL=NF<}LO%R}sBQv-4C{c*BZW@Yt_gaB0m0Taq} zhZkwri4BkqBb{AkV-vF1?BK(1S)aAJ$u(9UZ!QVJ<2_1I`wc3$Ogo?dNYdk6sSSiX z1YO{1K-wx<2l_YSKy$oT)p!I-ih7M0JUg_CMgHl4E)62vt8%k>F6P{84V3spoG&d) z<0%bj!{UzD&RNa!nyq|KAg-w(TO3fb06IfaxQyh)bevmymdI7JItn_mT<`nO-u&|6 zkdZIs%y^)6*CqdILWavnvL;{_)=axR={G3T^L@8{G4eyDA+!Yure?i zYjj|PH}{SaHh*4|QMy3v8a?cv;+K*zl~a*59okWRxt&_Ub~sTi5tsJrQcqdt)0@c0 zV=dZYI*om5c~=vRSheB7<+0pqPgw(PAhi`T9#0J}vPtllm(O>_WiH3bM`v`e4W4M_ z=WoM&x`Ln1_SwgS6S$Nqli(_IgmFIRiLARS+DBCupA~$#=P`4Dwh3JSLM(dL33;z4 
z5#wnaXv6$G@Jz5{9%ZtVA&Y722O};Pdu~mRmA5!=!V8MsQ}iZez;3Qqjct`dlcs?D z_7^5mM!|QJC5&d)@Su=SN4kLv=UzhY|e!nWH$$i&LYR)6{&%1V0- znwM*~JGEQ_hmE!32q30sloDaf#&?D8Se|)2!HfMaD9dBG7;74P8f=UNJqp3h*rHC4 zd)N3ft7}xURYi{?(>{M#mNv8?7SY=W7Vnwt-yd@_yd_~{y1gLaD@VDh+vs`sa{o;n z*54Hv$P1Im`*pJZ(KHPCO}WwBySm281p2zAUfp}9CL}t%#MV1u=+B~3?C&Gq>ugj= zsCXdO`8To>!a7sy9*>l)0cHSiKR3I$1|~fa^k3SP533Mbo?@&tvM=RwJDr%aoGa%R z$X1nfCuD!PiN*CZ>l53}>_Z@z{D(dl1+Cz{w<|tgYAJZDGM)R+S%qHf>~6nmzQ}+% zTetUs5e;kp^C9$N(;?We8B`@j{`#=vcQ45-T|W!l_FMQb;5?4 zo!c<#H&|vvdkkIpG2U^hem8T=?U9}dyID~(vgU;zE;=jKRW)0$ww%LOu_P@^9s2kN zQCbPW{7V^*9bPo=s*UiSdXtec@qFAkBMpy4PbYt5jua5gOF)GL5bQk9$^c5#BfTv* zM#C~;TV3qTHw6z=SE|UZ+M?KuIi&To4pqu~M~iW{?{9Te^1nMEk2-k}zcEcIw0)O( zJoF>ej`z0`b4}E0C15wmX5&mZ$Mf$qx+{Yjv{;cEsBZA36RY=P?avvD7uGKuHbYm$ z!kXwg~3r=sOApfMG03^x=q zUm|wil<-Tz8q`#MHaogkdd#+#m%CI0Fsz@s z_OR49rcbOHcWef!Z3Y>mY}9i0%HDCQYa2&4``AZmw~KDBrzybHtPKr~3SZ^j&%f_S zbAd6*8%3&7c?IL#m@A3yxBIm>CJ<+K;n(3?af6(O*K@WY^maTxD5w(ppgp{FDQ-cP z$S2KDm{>CC#id!F6?t|yqNXnc!3yni@E-pL&3`i2-!+xL+BCFim*2XuGFVu_;$&Kj zYmf9kg^v;ud5KvbnW@6Swa`alo&Xdgirq)N=a1_NLDhlg>=X?~ca|Bb%p*5ObN43i zmvbEZ9`$vQSJl&^>NDrpZQ6_SOGC42OZ!0hsH(K=7&h4Fg?z~0Cn)!jf|%xuJ$ix1 zQZq?@`gG!R?v03QT0Y*A;u0AV=4-=Ht*4)IN!~9i2&v@>Ki!@gL7FqU`^+^Yqz}T? z+Q+N2TZ>+OxDN`f>Ob^8Y-x#Fg39L55SjW=qzi|e{s28E^&)GZwueWhy^*h_o+e08 z(HyIP+s2BRSB%SCBr@Gd24*+BIR%^4zC15%wEFVSSCyIGw=EZB5vd%VKAD zU;xDzuOwNB&1a*d~|2 zH0T5!z9vC9sjK-$nEbARKehxt4RtA1f>I(}>t6Rxoyv|godK}Y7vT2W;-v|ggJFra&$nV>yC@LTZ%;e}G_|^k;7eC!v(B=XlqHZd6GSg)esoC+NTNWHpXT!N)~PI(i~2j?%fFE`h%&`pIU z_IB5~Y}uafaAAUU;}+@P>jk5@dI-nlbkP-f88)5p%Dm%3p-a-{zU7?Vsv07nI!nO2 zTGHL3-EIeM+xSr?os@{e9YeijNP#gG9u)sUtbIo^Y>ncPW4oV9XLw*@>92=4D6zyg zRD?fw?pZRdEL&4=0N}pvrzWr%u@^korjI6_6Pv%9t~|Cj<1)jIFN*Pkaidg;=zceM zNQuoEVztD!C@3CvI#sHBEhHbdGuv%WtNx5=(Lce=$+>iN zP`-zNRtAWhoapPZSD+@#t;)iUD@-3zI5ZQC#PjUeJzc@~JdV{R%21MmWa3)LK?`PvFAqrChQOwjnwKi3Qm$D%m<@kZDw zK(S*E)-kR5lB-zO8-GsC zmC$Ox?{os`_i42=z0Dy?Y|zN4_zqa`S;dO|0Jz-u&95HC&;73@NLLe^uO(&z4ha?L z-A-%%%b05^!N8`95}z)!r;4LE;_;LSra9zlo4M`j3bBO;|rSRAvLJoOv$zM zFJ896XO||>nwvrTt%Nhaprh@i(IQ=)xXlUB;oev##s1{$*xulWF|=kLIfT8N=-U)> zpLnBQS=fXlh2Sk93%36?Q5y*JOmyAy#QN^cY(tR93=*Mm!oRqhqNgNwYSmF#{(80o z=}p0|#-laY5HM#LkHILV6z#v6mOn13$vTD#@JHO3M0F(l+%!_pdM!7zla`a(;0|AD zSUkVT-_g0Ixk$34eK$bFxfKn##EF)b zfAs2JuF)TSE2)+cALA|)-Xg$12=d`7TFbC^B&>VL(U2sV8h^IhoLx>0)adx^*Vcfi z85J^`BQ;r_Nx&Tj5Z9|j!!GxbKDosmj`y8##@3g!c{6bdGhBn($@K1LV6LXVr$TyJ zD{FU{VVO+=J6lRO?kzx;ScS_YE3L#e5j_0B$nn}>HJEm)pvbHJ0i$^Lu_u_43k^3o zudA<{5m&AK1 zsT}Y;YwXI;>6{+f?RkpJ#6KR^<1$)eSA6*i!}Lno4{}eATx;IMtr(opR%rsd19kB@ zEJ2N^`~5mcy3pK(+O}y27MREQGS^Yg2{858f2ZX@ZUdvsz|nlN3P-dzZEdC>P8Gkf zXd;7KC5+G2Q!3i=Jg5qqYHAQN5C&CR^JTy5)`s!Nc$Pk0YO+OxXCIP=t@Juo9B1E& zQ6?woJ$E}Sb0Bru{V->J<-ryGEv8hE)c5j>U&?hZb@@G-w3~-oif&*I0)-1BCotk} z7Mo<6g`xeu)pAizZ30_PN;hT9T-`cdg|IEH-`a=k0GDL1zAV|hYZ7_S37wS~o1>yE zJ5B=<@M^mc-yc=N72$_|XLLXXAs?EM2Yu=#wi$gNAesnPk$3<>#XAp3>aWhm?s;(~ zTKZ`j-lP}Ke|XdQ&BLCMwQ_#rhe+)YRx7Cwk;?r1h55I8LjsyX6Ezfq^xS}SD_W&{ zd*|SFBZWvLIyyT$Iin6{Ljs(f)ecUAg~iqX3XUHrS9Tj-`i~aCQ*KmkbNk2K?h-wx z+$r+lrH1X8)f~2raGNWULS-lH>ifOmI>hd!lBx3<^Y&6b*5X+I^dIlW!rJh=$yTyD z{ijh6viH&!0;;ZZ-e~@ORhJ4R1e=$%RhR9Wxy;oZVk4=YrfQTypHN@ljqn(Gc=&qF z-AEttsQ>-FkVzrP*@sT8gP>d|Rd-53(I|jNT}xgYb9AA*rE0T1kDtjUA$CfF{C1aZ z;-qhE>?P8nJJex$?>N8#EQ; z&p-L~Ley6U=AEK5q!7f*kpsyx&U-2A{sF-QdFmzVU(qJ>LTflEmjw@wfpRA*4Sgb} zGRM!sRK+DF=LC|b(zmUPKz-lnrb@oK89Ju#yx7{E4M=^dSo3xhg6TIH3YYy9{1T_h zoaMh+95?FpQ$?u1T(y9Xw$0PV>^tB3{WYZfSI)Z8U1=7Ym8zpUO1aGP>2;oTP*qc7 z&)Hyy1)8m?O?yWalrFn1v6b;WbY9KKm3NoHJ$nVW9nyNWS?p&fA&Wgp@s4Ys47)5O 
z3kWF(YdfocG&>m};t6x+9b@>jfge+kdj`x81_+wq5p3;j{vIiP@<@|oUe#;zGXZ6f=QNX*C6vxVF&^q>WCw}xakUfZ)c+kARRsfnR^{(XIb{^2{gY3x2 zjM;-ySws&m+1BeQgLYV4Oht|%|FVXyr#56?x9xqwP<_3 z_wvhY424h|+maq`R8%IB#^kwE>5QPSu6-Ql>ebZGNfHg}cEk>|HPdV`UrZQdQGpmv zc`4QxCOr^Z+>tUEJcmqy*Q81qsJnw7LGbHh{%%vRa^`4ulDv)-2nFzSiiEAUC|UP! zE=H%1c^=F`lP2=W_JfPo3Kz3?b9BJH=UARPI=`tenPY3gBXjs|KE3*&D!fo&b2XDZ zM612_q3of-lP&tq!r5>1_J%TA`VrS;U*9d0J5GTb?(H=H&r3a@b-YOr<>ywc>}PUC#GxM7|Bc!H+K8_Vtb?XB$9+DpLl# zRMm>2I_VPt0S0uDC3=komq;ghISA(nD1*4GtPx#C3J@ zl^$J7_`O(Sze8K#Mm_8P*Y5tTJ?{y~^Bl3j%mbAoAU2kUf{0WnErC{{pdq4HO6B*9 zJx6&55po97$d$mtERUVjMrk#0cAa4}Ku8Z1JKy7qT*AjK0<+XRuz1zO zqIwkM+?TmW&6)qRtYS3u&5$*m-rW7H1k)05M;=Qnb$7-HcTSNQ2;#xy?3_au+SGKz z?KPRS@nREjPjwMe>UQFadKbpniQ*JMIQhI+Z|bB-X#N(3C;S==WJZUkPH%N)H|`$a z?ImvZ5A~xnI1NXw{XQJ1aL|mjghIO<%*H*y^>W>gGl!el&p{R1@yfgJ<$EtDjL};x z>|(sbz1w^`a?N{3rBJ5Ugxk)|1@sHo?ZOoj#|)J;ysGJ=DwOwvDn!v3OO3Z{L;QHb z3qx}*jj+Mf-HCZB5YxhIq}cR+rtDnkh3eFtlqH)bHTA3dp;;2f{T*)X&pi(^S(7-v zFJ6d!&eD0jwkKp^LF5s~hiInHRc7LgpJdxPp51U$o1#dwf7nzReJ(O@ZW{G*6z@I> zb;LX`l3!S7RQN9wm3pK}B=CIlI`YFNv*}b26Tj3Prp)PfGc}I{@}2y)QMd>E#DWH> z&EqCXJcDzi%8qWKDDkG6#k-38BysbEwIju4> z%j4Q5hv5-uFik!wsdHgjbE(2AY!kD_0gjD$w0)kW034E>H@v0LGj3Csef205g;c?_{g zJDZ$VFF`CUx1n9HcPWuZ8KcDZP12Z$dBD$vl%y< ztc@tcC6UMBxgMZ0RzOGk2BD!h)EMnxbKqui-)(K^N8%@ofRUkMU<2k^;^h;{O++m# zh(ltq={L3dFGfQM3w#;95)CsmU4Csj$DWvdZDAK3TW<9LD>d?=Z{T+aa@`z5@%RMI z#BM#@BK|1SE2z+^1G4-xdiiKh*PKcTU@ay*v+q(S4U2)4Fqp&q7js9LtY>KTDa>YU z>ccZY^TDCAceQw>xxsaKAe>O5EYO(6_Q+1dfNrXi^s$~_b+5|Hyxy@^2ffKPE~-Wz zNUvrKcp!c7XmLaBZ_6UkBiH?hG$(uAxX~sGuX8rS%`~l>b5hz|2jy3n7)dw!4_(j3 zd1vdK)SqWx9Prm=PxXYJKZz(?9m+_2ffRNA?)2FkaGibSCM9_X3-0Gu>>0V`Rvy$_ zI#x$9!Z5v2W<~oL({sf^AtM|C9YUuku9&*A`4>8qJ2UMD*CefP_NT%YEBiz>Bu3FC zsV*lt0yf)!9Xk-sBaX;%KMJ$-q958$-dXg^5s1Ow0qJY4)ZdqPJIIYv zbZ)DU2R{|)Rr$}9jhxZ-xY9ee|K%l53U*6n6K`#tNFL=RO!EKMGmtfJ^6hG6Szb&4 z@t)4K5XjBDri0f@%q41&#R==!TBR9mQ44R)Ii$;3``p}>-v(9oa91f~>y@OaxKlza zzjJ`-Upp}8rT0P&X!iXKsRhB$v)Ov@0XNjjEJ=Jn@3bMn>3!ZydvhwwiP5B`DvdgNX?=Y*9()JqUMp3%w zt{5zueiUZD9a6aiN63^zvF@+Jov!X+3GB!YH`esKTdG^vA9{}uFP-kab+1dc zL)5QqLiw_L*H^vF>m1&jp{ydxQpG9Ls+l)ryE`Dk`GW?bSqr647d(;{Ztw3j%nV5d z1?+kp9ej5~Zxi@4x;ltqZ}0M4QqFq6i& z<>%!DhN&@8jp~!6_)M%olezqQuw&h<>*pfO=49E;Q|=-^;vDUrO-B3E%-3^2jrL3l ziS!Rg2h?^ZAx8#MN3o=xXWExFiSpitqT?_AX^}2gDr#X7-N+uVW6rEDT7C~#T^h(2 z_=dQVEr;ojOnp8G)ut<gMp2!ziSj~`HJ(9^rmFa0# zY?oi28|30GS!xh-k2Ne>*{NX2%*2zOabVnlw^B$1z1!^%$_o9H`*2vINYbKOM3qk3 z2hFz_vDvkFut(aL3N?m4{^U_AkpC0{PEQ(QtkLQX*oCZ8k)7X&j7cr^%gP z^JqM3(R+L8xDTR7m!!J$srOdNJ%w4fci3%ObR;LQ{C0i&Xy5{Ra)0|kcQWZOiVr2q zocLWz(N2eUmTJD<`AK5<`2$P&><#a&Xg5_}^wFkUc4jGII>|J&hJ0=8THG$Wp-z3& z!_ae((L8`|nA2PA7S~);U0=kmbVqHm9-ObGW2BgvadJ@HD@aJ_*O9{ObT5gr{@z1&=NlGM)nirr4(?t%(wyD9DJU9Xmc(V5303!YnXr+N zVplAf$w#PXka2>pcY-n|WHCI9ND5V%@j6Gv!?IM_U8LU!VJdC~x@JCGDsxP}-msHP z%qz^U%NTQoV|QF_lz;2mxVHmiE2TGEpu!=*QElm~y)+ERsG}lr|X6 zWI_BE551I?Hgc(=wdL^1FO^Cz-ZdNNjkc3z?s~nJkQOE^=s3Yo64sR52P8P+7CA%I z?0;gFUuuP(LtYK8Z=+uBv&q&wTz0&V`)LW%%G^h0_i8&P5I1^+UC(yp%t`%s*JPD) zg^XiY`uGQ(&u)8Hss4B}Hp&4BAFtS;Hy20RHI*^OK6i9JL@H}YfEy1DQBUR4+c(~* zR^X@m=W8n+@6EIEkdF9GHjIaR1vzfL&HubIJ>C0p{C!0%1eavCfJc8W3Wh6Mc*N z8+SdXey-?Z!F^@3Y|Im+F;<@y2!;xT=c%%Hv>%z8^~9n`qYdVkLRwzKpHYToFV6>o z!sDT8@RkG=$@VVWTQa%7C!sHsIHB=n|XPFy~pZ3IqG@OMiKEM!Mdo z(Ea^Ss@B-&(CAIMhHFK08BfzEmn-|xDm$^d+UHr;L9#!_hGfd%Xo>7Ox@;HH!MO0o zII@K0M=@J9p1QFW8F9Wpr5)pNXWhAPt_bWjBITRidIUn1v{NtNA_aj>udPrKmGKnc zP^(kXIvrD%p2Mw@%m}{&*|z?7bGJ&4zTpV;2@JPDIWk$&m|m?PmlKFD3HoKKo;t(d z!Nfjj*V;K#nbj~ur4z*oM^x&$5YXI?c#97mG~Oy#aT7RTm3|rz!_=JB588* zO+A62n6Hxu!{-#8iyzV@UQEzkE}AS1A1v2GJ@#RL2mFB8O{>de(eSL1iO733wHY3j$*EF| 
zQXWfAWpX$)Z`rmFTt$*iU%Ymiyc_gAUe;}g76YpV#W**<3|jccP1+vYUb>?il_Tw2 zhnTh({9L^s5_Tz2{wh+x_z9$wV|VECRfv&>%=@%?qUqdVdv*qL_k$+<)q7H^XQD80uJ6F}Wa*broIs5V^Z(e`? zd41uhgP;SWZm-G6U&Ed;M+cT&m%!8)hCaUY-$?pUyA$MVNA7y|pt9P<7IsI#@W5g4 z+Bl*Y=u8VOWV*&xST}qesNy-^X>bNz=t&vKHdFIDO?F_QzYK9BTd_Oqth#cVRz>-> z6||8+TYE!IHfbiU)E%zv)MN5IO1B-m<9&2$iXB3;oPMCELsC*=u-2;$^KbNq0<%dW z{aV$Pd+P9=bBd}{hFFK}%ssrFJEts*HO1|G)lV1No{&S*u3^YLcoIO7EWhiLf8P0l z49xJz97HA_z2xzo!!inZiAz{`nPi5%?>b7FJ{Ati;e`Xb_q{2YKg2@A61jhme&>Zx|C&PGe155 zF-Y%M;3w>$;s?&hsMRwk;)DaXL!|)e`n`bU=5@W-ZYbKN;_0V^;3Eg!c;sR{$XPnu zup~#OrRd@ClMH{v^7P-PFEaIyN4A%07<(O@J9uR@BnF^sFIUQ^Z!~nYU;=bE z&LMdk$=8kbOIM8a9@VoRdqSxbfUGX#yvIToSj9}u4Q=^xqNk`OZ-wRDdt^Yna>v*R9h z3ng3MIg|yg@eZ?F2J2&WxJ{9NTFtRBM2)4#V+p0e-Lmrv-m2HG|6>bekfk7;Qcg?# z3Gk2kq?1eX8eW}Udca_E_5*SW%izb=iTPbp;v?TB$NoKtUg6T2Rhth?!mpi2lBgk5 zVc;@$DYHd@qcksS7gVtZ=-D}T4!3SQMnlkknPSWE6YShaYRm{!z z)on&BI>^AIfS)~PX~<2#`-F~uFm-K0r)@%CiWSwDk$jDMdHbC3;I{(yK&6aOmqBI~ z$nuL4V@o)Srt(R}5ur8Pzgn~D{r)SA#xL{r#wcGZRX_IAd9>Q#0$U_EPa0~8+kJ%N zTus=o*+aotsTC2+&YjFh^Th93@6#Zvb5jnT#BmEUXAM(IqJURz6o#^{Cz+9Mp6TR^ zT73IY|JA6avhIDzKw;IZyo#0bibXH2PmeQT+wa0ot?Dm=cc!~?mT7x{q!7q{2Q-re zY3rzzL{Gqpzlv z`#ye2`996X+f?AogghPX-%eAC&p5x66SGvghU-gC`XH#AjS2Wz366|rtjlprSopcq zzU0_37PFHG7G~#NyR71FF4y!fWZ9y^yHl^LQJ>=E!TF5H;p(k(H%bn;hw9)pn|7lB z67=!KBg$xoUN@JN`Laa+Yae5&o)Bm9<+n@=FT{ynM&>_QjO=(h22p?if+mG^z_8Aa zo66FT$33-^fj3mya`kAwoOZ}e(+@Wi{>}><+9CgQK-z$|t9sPv!iD@<3}ucqTe!j4 z2lc_5exA|sXCf0k!QVInF0fee4XBTh<5qTzKr}!2Fg!@~&BE^oQjPu!@y-;oW<$gN zM-$?s{+gCdC+M~dc_AqMU2*!D?eY_yEn)#vBUDlro4tHUK$J6SrGd1u=Qi7T!^2(y z|E)#S8MTy5B!8nla1|UKE;e74Ny=<&Y)VV95@g6SW02R_nTxpKP$QgFtyf>>;_?sJ8 zNw}KK&ku%#$y(~r_H0-W%{@_!7tOK6`8_gNzrpiVf1f}!JUa8~VuvSEfjF)noiWg4 z`Okpal=*@$9n)(7eox?g-Fr;)BvXaG`Ff7AdA%m5jDOwZ z>Qj#oNjLSSby91dPRxc6$FA=Tf^7dGTug{P!nOusK3tl^mc@kBXx5z#PZfVZhm5Uv zOf)o7?w!q$RK`qb+si46L4iVyM}?mN(keM zM3UW$FO9oJUCLn$I=>AVPYztLqk5isxxhe1L0|e?q*GG8vsgzy+{i+vimqoS1P>X< ziJlrh7`X%r&}EV#^ltW<`un|Yjm|^)ZBuDCwVTm#E4>x z(zJ-m2U-Wppx%{qYWeg>?$&GeiV<|0`Lx1cCvt zo^+gQFoos17 zm{-kpRMV&6)t$#1y1f%l1t&V>z;uer*Q?h*?BAv zr6o%_dg)4unlQ1kGKlypqafDsup(#<(+R{&yMvazl1w)ip@E5=8FAiUkDqA| ztvN?6d)R}3=;-9+9lwS?&GWS(;%5tV^^M9_rx-YOwor7)IA6dqg zaHVQVp#2!zNxSW$r~w`mtArfrq|bRv3t+r!RWM^L9PhswNjGoqwPlkO^8B;V=M=%` zG*kLm9Y5=$g~kUzbw$(lu(qf~q(oNq;>^s=5nnA_I08rVl7uQ5eg=RIl@;`-IET(3 zdPov{`ch{@3^woj%_Xw%HWAmK3Ey!DMXYl?dfRu_o%71H;2zx|7H+S~L(&j;wU?p} zM2!EHAgwJf;2ZxrkZtfd0P-{t*`qRB1+*!$-ko)H7@JDS&it4k6xG-XrEl!TI9JJL zO86SU=ZZECJ@eBy8^pZ=lm21hEt#p_%(zH3n~~9xM~`2k?x^};N3MU}dogh5gl*-g zl5CMCLEqH%nfTq!AyYRBnM-=#MA*8eaJs{>#UID(TDuHhAAA6n%GgxA)PAqmn>fA{ z)#gI(QsN=<5t|=~&&$uu+#@S3fSq7WLe}pam$T~s8eqr3WRDN=l1IJE{?Xw>U%CuK zvw~o`VLlwdpoYPA@^b3gBsp7FUO`R8zTD+V8Qe;Kdt?7IUcRTjc)6VH)&EfJ&R``nq~3^Cnc+XYQL>3a%%%2*7fHwE?4XSvJ=sO zop_J3(yw!Tqni#N-+Vx`e9@76hBWwW~UUdn%yC#7i<^0tnE09Gj2tD>~{gtJVKvWB(M~Buk1| z{r<3ja(TV|F;tbkc}Y*G&?$7D@lkb>kN3NTOY9Q0r^{P&w2 z_AG1gqGA2kI`*v`(c2_?>}6<{b&D1;?Wwj?b}2U(GLSmjq^RVkoI@jPhq^3kb886(TrRfI8yL}{qY-s2OW7e89R zwR@V)n;T2WQv&t+q5Tt}u}@Az+due9ff8If$*88I(oC>7*B<8alCH1fII=ubkb|iqj`5s$Z?FG( z06^ySvlK;mpF0F_7gONZ-mULs;6a|^PrAOTYN8+B;4N8$9A?Eg-b7r~U28qC018I- z6w^=rMY95*al3+GUfi5_Tx#Z9)|B7qjPtk9*{YBB*A8|xo3Ze`xujVlpE$VM5lw1* z#NZs1y8V9AyYiJP-waO?{y++f5-m6oZl{3ni6itb{VZLc<^%n1Q$o)1C-(Kk1&>oLhx7kR( z!q{Y72tQZ2OxJ@02JpH~LI>)GFELw;_IW`@YG)(jE!cScvz&a(Et{O$lG=~5yKU}0 z25Ryl?bZ^y(GN01sEoU#oqmrThX4EB4PgR6u z{_L3HrVk}7q7F)WY))K(oOrCbb!$(R6e(!NUB7YlFZQzEMg<>KQU_R)W#KW$<#qY0 zredc!H=xQM$&W1_NSD@)v5bgtPYQm>eo1o&L)lizm`Sv`FbRSX9iaY4N->TAJ7Bex-c0vkb3JGb*0Wu;AT(=Uqc_n(o=nn?eVSRzz67J{sDg%EB 
zoDd!6zu%xJ=bUHx=phRyTIDhTzZZu#9r z@7cK6Cax2nn%ik+w?#5@AgT)uR!j?mQ+`z|?+hxaKo$Yd>%Wr@fa!=wd13cpoO$=4 zE&mUDZy6O=wyl8%OOQZ>0KozzNN@|mo#2w7!IR+b?iNDOpuyeUy$XUuD4fC_3bz6Z zDCF(*x%Zy#bI$1V`u%f4B#nFz9)3Et``aY$;cWryWM4q3nQsV7n^JP7e2fBLZ_o#C^kD?bSE3RSWRX~3R zhx^Dlhusf0764uz7eTHytt3d%apZWfVn@sa&@~}co8(wpUo}@p9QaHD>KOD%t9zMK z_$t+%+mXW8Oikf4`{i1imk6)PeMwb{TLGjnalJ8^fXQXEvoXVH=U_SB$U&yqzL5q@ z-%c{4HFP7>?OiGC=$JK>?WOV^+JExtW_82AsqWE)g)EX|y~S8=#LgS-@ePOQovllu z)e|$(4qAxqC$#$YJ|fk9meD(nA*9gIyA>9sX1g;q9}-AV?3D(uMm%3Xpsu@syc)dB zn_iq_&b-5mi#KRg)q9=mB9xRDhqG&WyKEd)e0*77b=4zMQJ_wRiM!!|7#KR7v=Wxh z?q==+g&baAP-qd$IT2rKFM3tabvRkboLOqFm)7TNjdg>0pSP<80mw>i#rOh6Be-KC z2f#^Nn+R-lruk**+_QCaz?DCa9Bo3Krw)Dtn?n$^)a)xZa705}EZ?Lw)r4rT2#77JA_+8X5gI#S1sqplHcwExLZ1`EEsRoA9HuPM$|tVwEw z3L+N~Oh&0}PLaD{!vp~^piJq9JT{5^72QuqIFv7#DPgM+L4B`?4^{IAkDLD%%BtQ&)mQD zC-&XXarD)J=kxK9hV*B7@lpT=5v_IVe7awaqb>GsHRp7Yaog8%PA;X+ zCZz_t`Wk@X%17dz^pFTJrSjL=Ipob$c!N&8M$VS~HSLWe7*|U93jx)Hj}RbQq3o~Rgp}Z z)QZ25nDCGpO#q-s$|tSxSZx!W^G^f%GC8$Wx3jt(h=OWdK%-@AoK(kk^ z0uv1T=bAlG*NP-H)Y<*>aBjlND$abRoS&*ZM8kXQ%@p%JQtl%@ofR5$F#(^m9}OFM zZ2GHgyKapaj;Xsj3*J=BULh9Z<{BNQl~x5muku=7QM-*@>w1WJwbeO_Z6=3;n*)XAwk$XVaVm?lT)$df~PWdPn>tt(>S*dEa{hA&N-G-V5rt@ z=6>tCtb;+sBB2@3mZsVJ8@5e&t$K009SJpT0{9My7l_&<-cfJ;9CBHQu_HnlZ-HB-6TlY87+Tk=N1!=xn+qtrWqE_Lye zFH?7xki*|Pd;npt3S(T|xY z$>)9u+pnhGq_!CRq|u8Uw06|1hDCBJpTwPtu4|FT_tUG8cE_d5hN9S0BxWR&-QJy_ z(q!Kaw@<*R>7h9TK#M+d>MR<&*#HKyjJ6BlxVM3GO_h$q>Ey(R)hRxlSMRc_)UEVQQ(n; z63AR;MxK~cE7!DDfQVI-YDkcx*Ecub+`L};H&FE}Mi$+%`0TigN&n#^Kb%)G7!1?p zr|=5_+oh;+*8W#GM9t}haq`mbiX#h9&=rzvYbEJoGmj}8PHSbVU#m|Wrm_R_D}J|QrNZ|Y zq9*Wo<}|wLY7+-Q37jO_i}(GoNn{l-O|F3K)PxZn^$SZR-1-M8G!Hcup84fRaP~f; zgk2XVwXSQs8Fuqeb^;#E$){kwli|r633@kVrc}xDu+JQk)0v}}W{Bk^?qP`=(kP9`C4DV(w8EWKQfNeN(0w1`?26kEFiiwW zr*gL>QqtEDGt&lL(Ib88Ew2zbGZiKd4{R+WJ(2QwqnLA zk9lg|kWkhyq8qNeozhrTCe1s6>7iS-X)LtbLvLehrDjLzlH}rkK(#tXE=>j@tIE#j z(8-lo4&x?!)@M47F=n%dqfG&mHM|O6WmA(Kc;GaIlrC347o*4KMBFaR+rl55h5nN6 z-R*9cw2D6UP<$WiI)i?EzwFWcou|xOrMazp2ifz@h~BKO0h()?t@Segfsh-#U!p$G z<7wEWcWzdno>e-svYzpTJ>_6%*k}_DI1pQWdm={U&jjr&YL!Mlmgk}L5p#47c@ys~ zzF&5gY!a5;ff=`IbL6iq1w2i<(4f;K19%f0GbqX#Rl7)l+|VU`6ue{S}}`rwp$&<9x8k}`&!eo z-F0iMJ`r7XulXJ%d1`D=iW#SDy}dS9@@}K8Kq=(t!5;~ifZqt0)dX#+k9wP@>P*Cx>^tA$Cdgbr*60Trn5@?-o!xEKgUycRhpi1hbFt?u1>A=8 z`P{5mY^ImZHZuD<8e2Kk$^75xGBX}{Pyqq~a+sQXf2yD&OtZ$e&dH<(&qDiubvg~7 zy7-L_+m@IZ+=-4jhfZTAEG9MKn_ZHvo14}^{6S6xsEk1n-MbFkYuq`#r_e3a{6;ig zM?=$%*CySV_q1xap6_s@1HDwZ=|cC*NigReSe?5|5E6@YR_;=b@tD=xIcfBE&tJc? 
z$T(AD%`rdxmWsvN=$GwI_~k3#5q(y9Ew79JasMJq>3 zBMia6%aWPv6KNAkK@)_dzsDqb@QLEE|6wA+due~}7ZA@rldj|vWn>?KkJz23tKE5B zby?~YT%89de{xf5$oZ6))=RG%yfi+rS56LsvO~YnM$_UGV7Lh({CA# z_;w6|GrfV!)Fu0Kp7G${l245vJMmUprE@lYfP2vBcQmKK3VE04L=;)wV_!|%gLt35 z9I6MYdC({F+q`vw*~~w$T}ZVImm92-D7+?y;_BQ^JsgQ{-B)*(p(wUIsNd`9t2}US zq?0KuOr;TD7)Z6Fe?;kGw4O`@cM9>5%5P>0U28c|FQ&LeoT>#tQ-rpB`2$jfgxP#j zEh+f)@1?5mPSDz$67(n&eT7-iyK!)Kl*DK$31n7lr&phC-1)|)54 zB9Li|N%u-3mwrgnYh|LxqIRr4V!&k8l+L13R>ei8^-N%NHTPH3xKtO7>iE*zhJk4Q z7lXbYf;Qo#^xQSJ4*hw-z531g8>wFDXC2pe42XLjnGkiCBnF7v(NOH#QSV{5NHC&q zpf}FXAQ?&lnq+|Q)P8>ezEybkw0=5C6^h<7_*xX;=9okjVQyyA)^7V9`n%JmQo<|=bhN~M2O-%fQRrG=Ry(cJfXNShzdbcWLe%5(h6CcNZ^ctf}?hFO~YjI>*+VuM!`Sj-pUKHnedJ`?R zu><;S?N&d5o`};9y9Hcc+QIqjO^hq3N%-y~Y5{t&;&f;zy`)-BQI8wp=!4rgr3LtT zlO`Nul2IsYTykq*DZyLUrcjF2nV6vnXX z^WW%N^CLy0`|#6=ihk-=ZbaU$&y1X5?b~uK8qvw_g#i%O4?Gn-GPIbkd=XW2^Z7Yc z(&CVe9-H&2>}ZzxuE`c)f$%8@%$ZLV zu<%*I<0<_8jl`-`x31iS9aE)9b}}Y@WmObuG!OM}oWh5|U+!j3iK?Cn>`aza+$)tr ziq#8Y{gxP|!WwU0m<_b&>QDDci#8e%{s!bb;c1y9C|c!;UN?!ZW*r&!jS?IK66w@8 zW_6Xa1dP)0Oo>Lbb?GG>6-hM1)-iMuaar6tHp1akoHUA^=CC5wGE*b0#^Zj*lD(%~ z#x0fCW7{rb>-o>GJ-2^wotL_YOO>8(3cE+QYL4}+Zx`eA0>u#4nh!t;(+&$RLIze^ zTrqhHBz$lw3lTZft;P7)*PenS4WK#Il7h{}?G!*Xbmwg^Nchr4 znLi@lmfoZG0ku@^_3Wu@3L+4&ocbQEMVbAeZH&XB?v5KW3pUNY;AgQGW%Se@h>!lk^D>jPW_d{{YWwxF z1!ukD--qLV5|Qm2Gj#_R9Ax zxB$dEq;xSYPt+ZH`gOrB;VwJ3M^URo$=q>aURJvyH0j__K9NJKhvC5IFmp~DINKKX zr9MT%H&|m6^zhIhZi7H^ni~q-1aS@<&)4?nJ>H-V&emB!tN9|0FdM<#9L0OH>S_k) zCpW%tpjjXtk=pf^4E6KsVmIuN#}>_gB7)YKA5x2 zMAm!LlFpG|;PfVfZ=%z^k!p3fXUQ!(x#lesC+{!6-3QVYO*}pDGt~E*o~vgpZiwGA zTCSr=`GBsw4}k~<_}|HXM49imlhit4&Gq$3oK)yM(?zvO0+)yLfLwZM)GF=f1Ym?H z6FLlCs%4Zstkts3ET-?|8T4HWIa!j>ye{#g04mv>S=nY`#B0b2 zs{4YstYL1(g-QRWFh?$3k>s^2wPrD3zd1_*`K7>U4D$>xH^bIl7gOly<=$0fpIISF zP$(g@14J|KH8PPs+dlyDi~RqG4c}^!uM%<$`I+Mra8D^E#-gk@s#m9y*R1&h#LDg* zc$zxBaaP4~>8Di)q5_glwOC!1y34dJPBjqR*9VPC`}B_CS3@ZXO^Y)OiktI*mfH?O zlgp3zQ$BrNeh%$=q|>$#M>j-rd8tg&oBiB=we_Td@~A}x5hQ~4kT10F>r9EE?Xb(3 zTeaaz_bG(WeNlHBeNy+M8)W9H^i+GawJZ}M{4%wFVe8rYeJMa(tfQ3w;$I?ZcIw{} zx-@~@D_Go{{TVXi^&S&zhr0bJUUrAYDI23PPz;2p-0vA;Ee7SmlPZ>ihWs8e-;^^K zL|W@;ft?(kYezBliX57rLzQ}CJ$vOZp?QL(X0=u|pkh1`OCW!_a_WwIIfCG=62>{QXpU+0T%i86&_|~`mF?QcU406(yJVcYa-pY-BzN<9@gaKYk z>08rX=wffU8GTu955MW>hSKL6-AkX@siBxpX9hU&*mF-jZ>zwD0Omc=RAf2cwvEwz zb^1Y`kxCUmad;SA>!jff)B<3qbf)padmuS`3V`@~1StYHlO^ML>pX;K(wQhW;C2p}{B z3iVkD3RMi~xSxunCBM?Pv7gbWW!tFi0Tlc$dgYo;n}RAJuCgZvImjAQbjRuIuoG7q z&dZveVTBqAH)ZC+UAxOmTY*|8uV6(%!@UH*&{Dl?TZ;TZn+rt?@Ufs3h_;A zHKzA^y$m27bI03vEhf6`jiL{l5@c&g)fUec<1KLwKj_XBHK;ZUJATbTtX2QpYF=kf9lJO(*Qo;?nZK@$eS+jLy*xr$ryigo!U&h!mAUQ{?hPwn<>;v4 zo}QNsG;|q*Mru2Vq$Lrj5bUkK=9cTSSY?qkqtrz_aPBQpkn*qU+e+=wim&yl^;ev1 z92PR#1I-t5?&0uUBUS8s;c-F2~2mB=x2xsC314I zAUi4zde)O|xcSRvAil=M4DZ>_vx2YrbmQ;hmI2tlV87dVsF{0vo#R$Cga;u)owmPA zvHzi0Z|JO=V{6NOvP5e;HhVlY%K1XOek*2hmY1%k)^P9ek-@4}s4*!JNT|H+(?vK- zTwXY`%E=jngq;0L-XEE@_`V!`nw>rmKvxdnPLfFx73O<8`DVsEh%Osst-HoeRA%;| z$#qPIZ_(p=!i0q(Cn9Ey?8K>)i)4e(=YGhWsHL_7!azl`Dy0Srp~0grQSk74iD1;tQnvzCxdMGU*s6)j^BTy#~clI zDPMoX$5I;*T*I{06pSyFm^OT-GnNYoQr^uZwp32P1TqE#An=^K`|rz&2Tyx&IUSv3fl1byw2PKNP!tJHf`CBbIi~jcOiTx$upcOawIQ7~?Su z)L@8ouxI=x%wiESq|GS&K!3^dv$yjpXUX1FJ^%VF-fF6urgbl4eak|>TN&CfAq1UE zY}kbZ`e3!eiBy%%84;u*MNO{PNpOc}2nsZs_DSd45m18o#$&JuRwr}qB#KL|7rU3= zZ7tc&g026M8^s6*Aj|w*9UHIe@RS8)a*55WSEe^>0*yslGL3Z}duuh~N&sG;KpsHH3NvS&0N*s-D z9|RNKc`qJG43Nu~PF=d6)FRvOJ_x&=<(ONg@zCa?{BRV#PCIPy)xZq!3R{Vnk3OG1 z7PxT&w6)%Xr(Euy@ck3bNxO8R`eabPyH3`$4Um)mla~-_+RoJXZ57+1&eXLD$USh) zb{a6w;-+b+?l)(Xszn~u`m?esQj>|Ofe5(5Hu9BJYfyj6rQXdWIq?Iwb<;K6lXJ&`gC8v1pTbc7-W 
zUYm`M?}kB|3HijH_9lJX=w2nJqhbt+C1NAytSs|1t->>A%!)(J0xmXJq`7lDNfl=q zuaYYVN!Eq#87?*yz-%F{+o|t1Q~E?)a{JU>P8^rct=)E3?1#XbORqX~HqwgcimN`K zk|~9FYUarMl}z&HXmMkJC{_l)qrOltzZi|hy;cTYSUU{!5)3+*PVWpX*m+bzw-JCB zxfkhpD`2qU!2xT%5M4~i&H$@Xc;(cMm+Fc?M&gBU*f4?9W`?BCq|CNj!mM!ywM964 z<4A9t2$m+6BaYSk%nx|c48~?|Hf)lA?p1G38ReUr@BP9Dw1@;b+JH(>T6a&HoHHuab3p!}f(0i+~(8G1X(bdjKE^Q3?De!(W`-}C2AjsnbH(A$iZJF;cPO^H+X=4o2<{AY!b!ltg zUwc|qq{N4gA=Ee@nTKqOmPb_RUAAEPmmDo!hp^C&f)Nr#`vxl)n9q(!1dYQ#`l)Oj z7&iB@WGJxO=73*baLYwPiW9L8a#qo2GZ62$8D)6>~$NDzsufnyu*(0;ta+{?SkpxNie++qI#Me#vy9gCrLj4Qk!e;Kyf}A;1$O{ zi|DYPcsLC$v~D2ai&Fu$#2y|0S*;1Cmfs0vzKl_W~0$Xgx+)?OQ_+kHwX#2{G$$O4f7#o0vXMQ=p7U#l);n9JY9_;GpumS(FVU7tFh;qee8k@ zI}(;m)=}||Zk*qgx%Fm$34Ng!H{dX2xtqBe`J@amr$;ex*UPY8nqjgU(&O;3ZlS zwYNb)IR*zOKvLK~L%(5!89~r=zm`N_zx!HI_4W^L-W@+SKtY9rACW-i>-%Maz_UMg z_+;#r!*}D|OV+yE{*d>B$IcsJV0*_TD#?I6(<6)h`2{n;)|(C668WlsO5^WsC=lh;hP~GT*xmZg$Jb z&78tU47;kiPdj#U(Qm(h@65%vj1N4BXilSk>Q;BhXk@XvU8Ews1N}kn zMg12%$(L;=0%9V1KnpS~Y+$KS;^P0-twdSV7tT?5AF}A^^?_G*^_5-tuM85A2P_8>z72k=#Q(KW`eSW^e_a{BT|HzJ@lDZZ z-}p;?=DuM4T2m`lSuy{*vj5|2`rq621~vzrvHQPu5B}qKuu}mDkbH}RGPE1=Pav1mPuh+HK4YoO~LR5RTr z|7<$QW6FfdMViat|DDSf(7yzQR&5%Da`i51e}A7vMn;Cd3pMnu#FICoQTtJZ7;i*H z(BG(vpv1+;#seSF+D0xQ$rq{*enKGo5InY}YdxqB?B^Do-^BR(#se%b=&BC#0Hnw_r+wM!%u z!%6?k6#v8UieHo7b@>95`uB_f<+i=C*eGE{q}*Bm<#Ye`(EL6SeGp-pI(Rq~`KM3x zKh5as@dptI>uYO?|8((B-%>kEo3BpAB2^m2|C_h@hp#`UX#=2o%egOqdAk0(z9@|! zz~TbJ|84pF;Rk=c$3r<^6khOD!=t}EUG!#PIkkHSyL$iIqyM*G`Ioi5lmHC%uC#mc zm#2&AjBQCRof0|JnD=_rx)2ifuy7;FX;^8jp{ay?t`O3w+KmFW)*pj~b zz|f1X?(n}nUCJqpr%^a({GIUZzq;~K!01n-;GyB9e|fqh|3BNP4;L~?xvhJ@W%tm? zZG;_98AM_%6ANACORNUpKD%u4A2WgUN2E5dPaVZXzkDY~ubz{UBc1jWN~`M!;ut_{a`?huwWV4v+Z~`r>o+)6@Noi}hm4<9d%vlEHI{yI^jRkW zRrbRXBxd_o(JU0ZQF*~j;dFRCXX1~s; zjGb1MDk`euffG&p-6<@urqTs7TDz6bi19)S0Y{0vtIOujFk%%U z%LyHufF<={7O(Y86WzS!;Xro$$g_DByVe&iX`asSvKO~#GB~*-q}lRCl}W#Q z+Im{Jc#Yzej9Ml|dcpj%C@u29bO`oix~->kc2IAON{*tqf%$9yK_EVzQeU(YqYpCeB$LVzwAP=f+n>Ogo2Ps0w_oP~%KX!x@X_$OCgrzL;tFXZB~#)?c` zoV3v*zfGmV(iKZqs|##ol#1)+T6r7@L5AFFYP`I$(38fK#Kt$}vo7&F6Am4~Vvuno;Mh^7 z1H-aBjzJ^b1M3@m{M#N>d0{r{Gcb0LP6N<|+{H+#3Cc)`F-AAC^B}camrrT7Apf3# zzJyQ#$;D9H6wux{QbFppE}e^smN>c_?uMr$?I(gI0c*q^=bb`gH36NB8}k(O5{wlV z*=z!iod)X2kLM98Ycr=ikc-oU37N<5mmiq;C4_`0*Y%qc*teg5GS$C!-`M5d8ZoIz zi=Yrr^PFj}PP<{fYVbnVSm&)&Ql1rzMB*9@K&zi_#6!ilG zE>jds?-^a9TmKG^LNKrX=)R<2e|I&EC3c5F^~+Z?V$K}6*~v?m7H>g`8K>=a9WCS2 zmO1s?%8@)L95*qHcNDoJAQhYIdAQCsP=l+>93Nxm?T|V;^c2>E9rbNoeyn?Tc;1Ojv4o7(cFHyCU5cpl8A-u zYlghswWnGgW~DVK!hjX$1+=F%c_GQaXm?gg0c5yeY+lzp5xnyd%BTMiw*Q}N50eE& zZCnQQ-t(={g_lVKUHsdib#Lpun@VtzC*hZ)(FR=qHiB-yX9U!0Ki>OIl`bNC$Gs|3 znmomAZYSg}a%2jJ!S593ZN<*g8uJpoROzcbT?LNk$IjAQY2A=4I1YMXE>?yKIQ0Y0 zThSpNkM+EqgF5MSNn9uIu?e>%_D_AHX590qVe;0>(%Z$uhHMdbjFG*~Ds@{b$g{Ys zW7(sccGo1KA*^K{{IDGzesG348LHjW0@~mamq~TO@QYLnm_z(d5MoJcE?(tC55GHv zo-UZ{&@C_?Ut{3hs0{1}xhaTtu(pZNrxz{}$f&nc<){v`(&~qK?*{GpXtL`6bP@EC z<5!JaNt|{YJi|IU+2g;BDZg1@=C+=Xca&L?6?X#P6u;mb+G}MCbrxY6rsHy+Lnr3G z@o1g>fZInE2w#mgtFpe%V@sfz_a$>aN35(@S@p2EH7UEPDZG58`E27um>XYg*Wi&z0(82k6pdhFo+ytAt1$rG|_i%eL%x- zecK80+W6Di9N202VcgE!#zBWzMiq`9MH%m|Q*D*iLU;IW8*cPExHLSPvJ4?9qGt!f z)Nn$k$@GtbYkWSVd_UMKU>C=Nf985Ey(8G<^stF##ogm!G<+b>HaOcpD?dx6KVRBl z6c2@@E8pvWD+77v)1o5J01NYGqnSdr_puaY4j|f1m@8ZwBp#q_wD(Yt)O#65n_}lPO@Z?Q&bO3n325&<(tYJ&X;X`GJl6Nfxvb7w-CInH>-!s)+Aqi=jsfI*6|4(JDwhjJ?cw-eH=$Z z9EF3JoQuIQ=Y*w25lpAdMd_o;)!W++0*R9~z2qNzd+< zk7Cvasuw!I`dKjGFalMkAD9v%iU*s*$7lpApxYxNQ4u_}QX)pRmyEfxgfX2VI{MqU zDz_&qCyCYen}_e_U)wcfKlo(Wm-24G8}3x^9PaLS`RJVpqy$mWdILqX$5Cr=EHZZs zvmG%BaPKpSv^_YLVy6rqTHPscp`?)uya^?-Ngpu-q>MO(K{uh9z36K#d-vdX?-SDp 
zt9A9GUK~q@*6Tg6;Yx>RU-Ew(T`R2mG@*5!^1L@hHrP0Wd~bLNJh|LBn)`L2m=$@W zL2b+r`qNnJnVMgh2Sg^E6)L%NYz)>GSQ6z`l^j@PJvkr{e#Jsvdz>4^!>&tJ0f$=4 zf_agSLA)uRFVw;H`99mFdAp0-FGgal|Rp3 z$90)UHLm$=LhhsDXyA4WGfs6?Iw$I*C^&rBo%8NmRyNV86>(H()AaSs^OtD$4d=%M z#byCCp&JXVq0$s4oduOuE(a)XZ!m~tpRh9Rs7(~v9H^IKONdza0H>~ct=3?U*4I2G z7=!lLsHZcZ7h0TDu!;Iv?N&R#y(kLWtRpXawHi(uEn1o&tZyB%l^srfR_OrM<8|Gv z&;i-2Dd)@5Pgfa7zbd5A!GbOtLnaAgd?VSUU#MN&U#8VJ=<2UMUL6{9@}+ix?2Z;0 zahjY41V@-AG{V*B7spJft&&ugFRYP$DZEv@rH@}2knyWCO=s_OPAVh{B(4o4$UUaH zzjrTEy>vDX%&WNUf8nK`B<3VgGa+cV5^Ihu>^IAkd%^2q=ro(}K;=SbI~4JZp9*s@ zg~v|$F`Z%#gMv})uIBuJt@T2cE{iURDaPkUq4^1`mh{7g&NfFTC+4$;oFcQmw=nPH zakEA{cpkM1Gi&G*$ib;j1i3Ih(4>6anN1+S$R(3+u@O#0W0RH=t(F?uXwuN#3N&42 zIZf$`z}L1n9@?%|$f@_*Uuf3Hh>Iw6kf!7vcXl@I6>_l|>xtU}TB1AJIt6e@s+AF? zmlta&#J};q|aCrjFvqXLISEZaUWs zXh(@%prgcjpTj7f7FU_kTm@!~W7r5{>m6?IG6{bj@=n6O(^Bg5+d zk*|~A3Id+#Qvj^}!zrVS>Ay@8MECP5GB- z^7lut-@WG_e%Pg27>3&S4hHVwIK2q1WGu0>W zS86B)g)_c(s-rYTX}cIs5^5xm1CC#BOyZB5@a-Y1{Pm7C1q#(fAFCBYwBxu`YxzBInuG@3iw5bmP|DuTmZ`!W3bAe1t7+mo?F{%5 z>^A3|G@B49WUUXg;(HSE9cW@)E5&YauiuX>n4AjGJ7P4hBOwDvp;X7q7 zlww}i#SSKQ@_vSOUUnp?YEKq(*qyTbp-2XsYvz8KL3eM8!0vZb&ziM6{wnk!#iVZS zD96zFlzYO^?(`DmeO^uelWpOA?~8#pv2??~k+R+d^yd#H@23R%hAIL^UgKTP%6K-#~O4Gt(n~ zY~jmpu0IPUK2_0{O1GBD9Q5JP`;czo8LboD9eHk(&99^r&4nE5Uj~vC z460nWbz%xE6xL2XhEfbv%SO&V8*EG1t!3Ej)*)lbz_z;f`7#Aw_&r`fcJ;uJ1$%kL zq61JpUCHP{brEtL)+pxA*Mi)rL)#g=>jZ|9@#LSu`1CGP3aO2LOjVgL5_`=@7C$(w zej|=k`4rhu`AN1EwJhaZE&sZws52S3z(g>WJN&ZDMc9Rj$AMRZ&{|_C%D(G3S~F(H zJZNFYAQ|4~?1YmKUG#IknIz|7%&nqHAh{UZMm`&HAdN%%8zlO{yx=47Dg{B|%JBn2 zfqQWioCzXoB)B#J@Rk-9d4bt!q=E8GmA?biQKU>Nz1{y%&1u59+127(;DRFaR*!PpM8o<^(^rX{x-L#HJhgL*My7{}XNHlWm($#o5vS+hF^ zGu8tn-;O+(lX|}&31iOf9UbGGI_)T{hRrk$PUO!rNqt#wB~)mwJMC-&!c2Rd(+~VQ zvz%P%ggRiD)ANSoTyuiZZS^#?QgLC+iMi>!L!V)dNmdVEi)~_}rGbmEEkYB@*h1&^ ziVN7NQg)BVSiyax>&4VSn95i4+GAhLqmoi}@=dM3fc;5BQ@X(k`!2zuCzso`kw^R# zl1V+s*Sws6(294#a47!Gg2u8Gzbd|QZZqfGcDs>ipB5Iyq8~o3O?3|OK`yKnVn%AM z746lvsZN7|zu8?z|B!(j&0Hamtz55Um(#3GZ{u>kX4-&YuUxe6XO)L38KcwZSi)PA z8D|bwWyacfKJ2ydKd^)MCbi#Vf5^W1wS@((xz)%(lx8o z+}^x>y6E#1Q*F6<6t43V?n^kBeztG<`t*|I3%32ci_{P4J}@OARh0FL+Zn9oIuP3D z9k|t{q_~%tp3*{+nNQx5KWro{evlLg)$%FGn`p@+cY=RVHRJV(@aLZ{a*4_`sxPm2 ztL3iwTyCCUcukTCh96XFO#o4Mdd`gTi{UU4M4Pv6suAW@1R0m2Fc|W)8UO4+aLCA0 z=}$nBTLr%cE7;ZtvZA&N&f%omgOpZ#D`_>K*AHBhDJGxAaQjSU0ZsySt-NPz2GmJO zA4COJqr`h59?f$}&|dN!O~+@}T!$ky%$+wq&E~du#3F?3ogZspp5j^YE>jhneW;MD z^1_8#u_eD#|)cQ%owh^sErK$Z}2D30zn@BTsxVQNVc9<(m2l5QO=UBAwrtMwh z@BCi7P;)Guiri+c~-iZ3R#rk%EnW?Ae?Y}D>l{5YI& z_YN26!-y~rdvE`CM#1IUig+iJd9QqD-pVfhT~LMjenl7hLvPOmm{pZJ#<+k~KYFF% zRWU}+S8P`I2sDPDXLH|AKc+n>(V&;&NIa1gl_3;9IS>P(+GAp@%nV}Y6?tR!d<=y4 zk7|hE%_goRiYCk{QSDdb_u^dW%G$U3>lI}C7<1=vw(lo5i}NYWXC%xDKZNjQqN@Kc zG@DP4bDNEjtF~iYAJ2X0p&~f1VYQg&p2i06NdC5+U1%?Z!D|;5A7|gIy%=;eXn#Cw zlzTHTLiEF!)w5CcH90SZu?Fr}qu5FXxh;`Wn3$*YA{kiOTi2u4;aiSI=8)MzT!R0W z_${H8`qyo)nT0X!xP(eMME}7~3zqiW5q7AGp6z} zZkxq#+#p@5v0K62I)RTV-#HrJ@NRKi6yt4w%p|{jb;eU3fVR_Y70P{}pp{*YHdfp` z9)-F!A22PxVVS@(_WD!hY@1zv3p`e}pWHq5eP-Vj!SUF3fct#VUXk>=C{D`WLUD@9 zU9hUErIgK(mFfWwo~=RO3sMFNp1W!&M@*k6DP!K1 zMjo|bd(ym%8)OSh^v?D+lulRFXysH(W24x~9xNrMbqzoU_>%kkYH}EjLtG>MEW`-7 zTcp;TiZ#lM;~O%;lzJ8?EwS!Z-=}l&qc|b&@9dy&>#g1-UK7&mh%8vn^-i<2gw)z{O}y@)&n$Xhe=D-dI@~m*8ndm&xMel?z-A~^ z^E31ds{IDv3uI!~1@4VytihL=d{l}%cLW7xBt%vB95#OD38OU#!8P06BV|SKs7AU5 z)HjEs%O|sFhd-MT4V`Y3&QiFFK)ear_I6x;kP$E>XNno6WwH->eaH60#Ot8V`Y4od zEGY0|Yc%hj+8YCDGy?Z7#AK}nG&dcaFgfED1Z9!&%mgoP<)J*+UI_ShnMkvCL!TV; z09qU~-Re!w;Ta2`ZofXy+4Ha-;CI|B26r4z_qg1-o2DQ5d{Cr#w{sgo=wE!^`q?ik ztmy4iZBoZo4uGML(c$uMs65hXDNTkr 
zXA_~cxubm=tU>w!!wb6m^(a6ZXb-ru+o;MYGePWaEBCht6Sbu~=ZYyK9pa0(3@~ z%`Q%zZqL_@CF`3oa#lH)eT)a)UAd^3J<<|iqcp-bY@~zy;6@2Ws3xCb?C(&lqOr%P z33g9RG%zmYUk+IAT=11!8R^as{zSk?tZY>MW?g0k11-To=r*F`Gk07V0NWkPXc~o^eJ5!{* z2qYcp?nL`_h-l%~YF42mo4F>rmRoeEC#^Sr+oq{a2xxF+F=-~p zoG;ajlshr?aFM77enQ_JRZ;E-GW-;%zR@T;CYkXiRF|}&XB>J&yc%y)TqNhx;e@&8 z#pfDWO}a}YDi|oebPL-BBG42J_eL)k%!boleu;a_Z@G%Sk`a!W_Ze4Oz{ndqQMtGG zo0DR7b%!{0j5Vyb=dbbUu0DI!m@Voa)MuKjX_ti|TWbR74?V!^$%;FY@6X72bnekU zQr$VUW877I!0#HshQk2OEF#s?jUYw@;Jr+AdgS}+f7}WJXHIq4L2b%K2E5g zoz=vc5N9nPK*WzQ)++Q=g{td@LUFfcFYI$u+D7jZpi4N3b7rj=)P+Eb3(q$+YX_l1 zFH}U!By0QarRI^1ID|U~MWRJ3-%Tt&1hOIVY=es5#@PKiAX54H?JeVn$~F2R`cGN0w1Nb zyvH+gdNg~WhtD(A%+qHX+5Id4>EZ%Xd5c@20nT(k;vZbJogo{#GAR3*hNw?lY<1Ai z-{S&Fy;H;63wuNc^gs3y8c zwUbp!hzfkk&Vt%B5lPc(BCA-MO|X=*He-hV))ENDMAy&5`(<7TEtJCSAGW5Immx)S zbz4(L)kS4&lOS_KFNHDg)M$hc*4~OyaKo-wl5fg17OIwM`&YY6fc|^pw1|)IpP+{eDNyn89k>+6RBIq>iXuiN)w#i=(JkAfR8l+Z&*K%{r+0R(~2LV(aC@GZ~z&N+L(*M6V9_xt<&3klbn zYt1$1m}89l9{0$yyxQGNB4KNW#CsF=ucp7hjG9(GIB;vo-GmgqtG+kIG){bQnefo~g>C5?G_SXBeGv<_q@1gUf9E-EVKwF>@ z2#EE2ViAPSrU#nZ9JaIWCrPn!LR{V5#^wdx;be43nSLyjA`M^hh%r zdjMn-N!UpGVJ z{!tfZio1S5pWjrhW|{a9-)tB2S-ZKh;0k`}YcjsfZy&MZBG}c;aZ#AzD%+mOCK+1&QJGQ_%gS2LY0NRcA?ODCw7~NpLfqt#nP-b z*+|l-^Yxak>+1QuV~tHfurSwkK?q^Jx>V69wr~MD0JLWv(30z?=XbM}ik{5p8LKi5 z5ijQ%F?U;9<$;-`1*n9y7FTCb?UoeVv)jDa2^WN^TZ7^LzV7c*<}tFqCOBXBg6(CC zxNsg692s5`^gmWUSt@*f$%PlWMqStU`H_G-DKYuw5n_lP)pinIC)k43T{YKM=?rlS$GLA=6;327F(2DU zurgu9BOhc1Q*}IU+{A^vrPoeoJv6$nnT?J67p|B3t=nBRaXroX{yU3_u)*9#)nAp1 zlF86;$RI_xEZ*q+6LqX+g89Tj6ZR;PRZd#-p~E2!vvC5%yVSd4NSu>BE`u@aS|vmh&!d==+@yE_#t}i4N9+Cyg#_4Y>)MsSr3-H#!%)X5P21)ZJLV&Ik75-S z6PnZn5s{xZ6Jh<|rmG|urNIhNcM2U5slY}VDxT{;boQ06w!LQ5M*5rGb<~pei7;?% z#hjFFd3Kbl>R4?lmMknzk|+^?PE7ZN{J|0TV6>e~cwN1*|RmCzvb1Q3`cmjcSmpxWk9N{{0*G+pe6S^*3>F z>a-`o-Z=eTD(Y)9pnrz$y(GnBH_JW!8juLF3%H||xkOtA4@8-@ z=Mnbjn;FJ4h0$lcqa{UoNsbb~T|KwT>!!XfR|G!ao(?BE-us*hDZ@KQb%B@p9FSACt)%B`6?T9`Cc2aq0 zoTo^!pN&((T03>??s`VYggVU8-LsU1lwRiJuz?3p#hFWAPRspy06342+!)$YA-gS0 zRV~>eI=NOkdm=i195pDORc*~`vTdeGT}-b;v`kpM?>Gyh*uHb(ZL4gI)-l`V+12y(k0nE~Mm{=8a)5I6LPOQ5$qF2Iy>P%7(LMld zDKr2yM@|d0q^eXaAv|V3bmwc_Vym>mBQ34qO)i=MnV{%{zM2jP=NM!R2b3x1z^*Ot zS5&12R;cU548L4}KIx?AT=d$TtbFwb$^OMO$Jx1y*TG)r#g*S{2G(2ls55{SG^qwo z__kF9YRXT38Q#q5iRXDVHf0GgSd2{Ho=99C_DPiCg&Ll@)62r ze4TgORWpMkrR9#H)Jsg7X9KUumQ}<&DTpoO@$rp(tfEbs$W5hJqJW1V9i;Y5=s)-X z{v|n__Q4JnN5)IxEVfqR2IF4^7PEx>-|z@ z*=>IZDYLzuZlF34WiGH~Yl78$HeJV69ZCCY5s|xwuF=s^NWAwDtbC zBGL=eJ`vE!Sj@?vj6PB z45_;cvEz2$*GJc{p!6F!~EG95993bRxpoCZ-75&w`Dr1 z%~nQYYV&tw+1Gh~uq9CsaesS>zFiEs@+G(GJzv%Py)Go_X4jgzqlpc;qh^zp*(Euv z8xi=nrV~Z+oGo{Fxt)(7*ppff3cYF5Enl<-67|&u8oy|VlMewrq zF%mYRsDcM^LqDzsj3!7{zdn$8)D)KM5;T=6^VGa`CCmH{6Ju(@)V-zJ28)^X+S043 z(T))uNZ)FQSzaU~1mA29OUNMtDHegW!Cn8|&Y9z=hY{C#4L%!Uvc2Eh(IXm*x)A?~ zB}B_8%&v`m@GW4@tf7Ua#-JkLVBa8tSKcOLB`ux6+Oq+q*+saUrYK?MkEZzovd-_N z|DG4H^i(x#+|JS}Te35tTwk_Ir;=&n?n9&`xAIr){7-jJa`D|>Kez^Rti|`~q%A7- z!^2;Hc@&4Xa?rvmn)1vt5C;@-gwTXc{C;o3 zV0NPp#TA`aCh{1|eUJC~A{FKlOmA!nx^h2;<2QK|ax=aC0gKg_B+lmw?cv%BV(iw%voy`S zV#&#l;ARv!>@%?hI2~*AgSsYZCp~^%bNRgXE@=wrbOMx{T>$0W3U7``7Ya*MhsyKy z`-{kzM*t_KrKPa!?rd{kNecvF0GZ~yp8he13i!cpgelwcC>1`NSUN3-cEBZZ5yLg3 zvJM`XPw{U1)GpYQ>5`6f?KPB3MuNAniIxFDQ!g6(d{7oW5Olh zm?U`{;29XIWioia-;u&y4_I{9pov+rQDyJed9Z)v)oKfAS!VMp-}TmB=Ea4{xC36J zNoSVY@-ym2E&~0DFU;(BA;e#j%cxU-xV`iT!n&s2tO^OgH_C)Yb+k}m`y%Vpnv| zv4mRvORcv?#T)S_OpV>KU0J~fi^I9Uolwh&db%%l*scb?vlR91C$x65I&6ydNb!u<#%?kVF#ARRM|HSW})H#N>5W)&a{#ry6*erfKl zyHK%=0x%i;#b|l~1s&qZe)tR2a&zS$4ryDvHB?68M*kEk@vH%&LslQ!D_O2uzbDHq zs1=H)H3Ij(2xZ#~f``ZrS!CD==y3%adt*jsvs6Nu8Lg$=Mn7Go9aQ{`NXf-(sL*ZJ 
ze9;T*t?U$4b#5>9-gp(|pK8j&>5!Wj*z0fK4s5v>!+KQ%7isNO7VJwj1(A0f*rm2G zN15^A?K526bIi@P@wx9Z{M~)Z)3|IF9ey(<_K`N-efRh*pofBbg#02+Sa;Gow^gTl zY0YFvBfm^#kT`)TZGX$K5&H;Dc5SUU+~tr?Ilo|>1$!=AhTh{jqEL5o#s#`9=n71( zzQ}niQ*a9W_{zo3DZdizHrti7mH=4%{m>^^z-U@*%rjC`AL?rS{Qxfy&J=F~T`mE} zj%MjuQj!en+_YhaYjCAUUfT@vnA7$w`?u*(l2jZuksbr)K#6dLl@C})K?n|8A3TUo z#}`Rb2hx8QF!}$MScZ@;s~#I8Z78(;h+Eq8H?eG`GiN-~m2w+hykLvNMoT$rQqW66 zB2|Rg`_>IVvPdL5LzaBPsMzRBbIYtnxK;Dfs4p%7Op2VQT=e$Y!hl-#!M;mO(klF_bu2-^)^9 zDHY3rZtD^hsuLp8e7%X!IUCw$t%eKGCqzysD=L`LZ=h~w4crp^aGcac z&J@*Od;Awx95CO3vR05U(Xngs_!`qE4%6NU}+qSbgpuVj#O3y zqC899v}!$^s4e6Ag$fdt2-rYnFuW&u z+%6r0_s7jA`#0zv>OAEwayP8+E%qL>lgF?x3z(LOVwdYfsJl^PL zIglB2tv>0NTs_L4#_jVXopcpg!{o(hsT0sb`wpPh!nAY1k_cJt3NN@5xxc+yTU(X# zbZINc6I0?{P8EgZQx7D=Jpx!ZRyQy)lE!@)E!1Og!2vv$42ba!Zz-qli-qQ#p*xX- zB>zwef5=2=oST(7`Utg3Z^9G7c!sWB9s%E=Htaj=cz~AU4Rt5!@2q~jmsCvW~^TWuziv+m=wT}$UYksu+76yI9 zJ-S)UUMnCTs2lo`_U!ZyI12l^V4a3?1NTj{z}_SUosy|)W*H`>^Y8+h3)cS1y9pHb z_tgsxpBnKYeg&@RI2{cw4XuV=-kxT~#w$vg0@s7K{SixK>kIy?Lx>@v9xP%sj0l8Z z$TlaxC9P5ocjFroq)AQe_u-!3+Y(r|O}g#L0#zfmru~|O(K{m8$t^Zjzkm)XKjG%* zw&_zsB;>Y?%LEk^WO4rofv}*hdMBGuou(U_3AK-ayfgQ7M#k=Ws$C<__J7WImiT$U zL!E_L;8tGvj~^jx9rFX)3mu`CSIE1QRBO1?S-YE$u%laSd*Q~qKJeQPr2ajkbnNj* zVIei3UNIb*`cO;x-Rb6`!bgrTRKDQe(j3Mq{z!@Y_0_KdnYY59yzT{py>gC0M?zxY zOOsNRqVB{FLH9qqhA2{fX(N+fT16!bx%J}_2RhNI<=G>-@ArV#i{%dR_>^G_@Wtr+ z$cYj;JpQJrs}@%QfYG#J$U|w`z=vwk8@c?{RX8N*MCAp0>1CJGy{ilKC^g~#yym7X zvPClb2mdSgn{Q)pJ-;L={l z4?%8a&+Y#UE-g1BS^>OF`J~Su5-vw~QeHD#OmuO7l82l$#vkgfkZywhb3wsiX_r5( z>ygN|ImZ6AxeeNg@w7h2?;f&Utx{P(Bsaw{5^W18 zPo#*y!lJMJPV0xo)FU+IkL=2vKnNepx+l|tWT+j z)eRVNB-B=-{$Yb?z4{xGJ5e-KBEQTa64}DTQjRmZqlU4i@6WcMuZGbx)wr$%!EBJM zkEAV~1i>)N#XLAAAzwmcn97HHK4UxRElJLQxB~7g$pKSTJ=Lb#!T9I9$N31?sLTw& z;H~=4G-+%GGy`;+?ME)3ZnAm~h^&8}#Tc91LW^R>jlYlzZ~;V`8fOUcHdWwOZL@JG z)KD?k+*@|APrMMR<9nh2gHEGTVl`1grp=|=e=wxqS}DV=4gv!tx2$WDKPuNID8?>u zCGa*KXEZ_y(nlX7C*k>h$X`E2E};4wMw)=Zbh+Xy$O15?Wh?yhn<)O$do&Xkm*J2* z1;a**!yj^5C)@;_WF&gHy+ng?x=)?smvFPQ2>Iv0CXpqeiu5a&ald2T?)y}_pMPUW zFD!1Re?E7~A18VJ&S_>q@+Gpp$U`%LK##PGZ9LeYtWw5q$n!rzo5iBF&){tHAwyW- z9})bV0#|x)Exi&XFtjssCo{E^EiBtpXSFc@Hvay4cd|gzGPg&3+B2I#RiF9&%cOgP zoR_X$JN1#&QF?Rx#GT_?Y;)E%`Pp+wqt(uK24RR!?mt@Im?Yi(ng^7i{RkUH89J4H zvbs{LwOw(=GuZG}IsTK{W{BGeDU7EiVNb{Seh*|exC$>(l8*|NI?e;ooI#JEhr>Le9C#w2(dqr7<*FOi| zo|W-jJ}!9-b(fmPZ2FfH3DoCb%baQp==dcoZU6{OLU``{=F=)Oj8hV5M;ZzN`x{UD ztWk`dd)#m(7p+?x_(usR`K(%w{igQ0{DQKOD!^?Geu=fCvpH^saF`cYeyHY|KU8J$=5Gxh_K_!@x8Yjaj#Tf z4!t-4k)kvYfVzB3`VTRMBQ?#;9DeOVYDcIJyUFDSZocFr$!1ccj4 z7WoIxQA4D&MN7kdk3@42tz%v+x0pc?(c)j)5V5r!T|^ZpnS^-^A?l8I#ZDiE@|3Pq zL?Yju7S9nLsrcli_QF65r?in6W956c8=EIXT+{hI7*E>OzFepM{`$1S+anWcouo45 zrByjM2$9kjY6j2jIvk6BGZ6=Sy^@oYJ(@PMLx4u>%9EYvJ@JzR*?`f}v#pakuw@=7 zeQ;HyWF;_TwE{YzuMIc|D$kXt}7-mMw7#Pz99KHB2lv)e2^h zF6p=J>1Yy{zJami^KhM0MFQ_-^fbO9sE5cyt1iE-siuJ&@djE&seO6YLE0Et;2H?3 zHFS^BFAiE(M2^MkfVHYukfDEgFrCpjQ+D2F-o-=M=U4MBkn|J>A9rPMtK;Ak@!YefI1qU<|EmE>Yx3Ag%+$!_eMp@D#$Z4w6gFDdEQBERh`1xa?NB2hbjOzc156dQHWW`e<}9 zFDEr$PFa&-GgrHz*xghtaIWFqjO1gYbnnIvJ$~Z0A@n993}K@G&Jmvx4uFfuzsN{& z*fMBlS^v_L*Ua2}B|2=#pBJUK^VC5L0jOH|Jg-VUtyMH)XMT9shN~|QUckpDZV%Wy-Aw>mx8!Mk z$-j8I&@ZFp*$!)e0Z-nGELj~qI8x($#VZ!- ziB#9j&`Lqf!>P{3*Nq%A3pFGQl_bHEB!oa=sps@Te+Xt{?ukm(t9{?)&FUWhp5z}d zyOgqptqS>U6{Kc&S_O9y>(rWAz{G3xFbM= zS)P#t`tkM0Hze?n>1S3Q%3Y2G<@oz(BH49c{xc>V7&etv##2&~l(CII0>5}FiQo|p zb+51=0pw-deREQa0Cj%2^67P8K_rNx>Qx>#EiiLHyA%V;1#T$M3kkvc`QYPsSo*i7 zNb8m79GbBq!C*^+`Bj{Mv{&z9>yw!!PWw-E%C{k)d`xrct0F4Q2bGl&&=AoS6{|!A z^e{5U%@%1?p~cV#Gr;aO`#TeUYq@M{E(t($TtNqDUeo3!v49!#a`N6KfSYcwe>^{& 
z6pN5j%t#MwZx)gEkfPYYPCq=-o-P5yL&yUV_XJfroEkQ|MA<4~^~b)S z6feo>z|=K+!P8v6wszv~;$zeZ$m#@f)S3Vg@1{ngm_PBUe*v?wXsh?jE9yFH+W+Aw z=~eg#`n=gCnrf&Mn)vw>F1%AM*9wlyFL}oeaxHU`sA8wX2%{}Pp7S!Q zm2F9OvO+?SyTNC4l)nlVRmvvvAbuI1$xBcwmIy$UDKhOyo&%)P`7o@4uf{$hYb31` zj~!eOepWd<0=P8lSfs=&W!TNB?vJ>%K$Fb;Dn@NEe54RXM+Mu7==NpE z790DVrHxR8Byiq!(jT*(zw7(1`^!Tc_?NlI?kD_3|IUCc7{>r?b=X8y@pm}M(x|E& z(~!sbf$v3NTUNOz;V z;@vpEVPZXfgWd~!56zPZm~4q%QTR~;u`|U)e zt;0;IDU@SAB2e>Ay}!d71VVHYy3BuGc-j^rj6&UY>NfJu{L%9pzH_l^XfL7Rdd>mt zwt!I1LLGA~VgoP*u%F>iA>S5rQD2RZoj{=GHM)};b!Tb7k(3(Nl$Yg!zgT#dw3Hb@BB7xr zI_^y=K9e3v=3#sv)THjk@6S|`4Bkh=;k!X+v7frNw#}3ZoTqT-9+>oU4#&DR$VZUU z*T~55!KlP#f#Nr*d!BuZ!IKArg+-}auZXQ4?sz^=x&w+bkpSJ2Ik7$e2z|Nf$YG{d z9UGk#DCd>b-(@Et4*1twp>Do@nC`T&^lbTkF}b z*)3DAus+qEH6KmkQ2{y;y~p%D30PA_9#Ut!lW&yanH007F=3H;@qNHatjqpkQ^xPQ zb+5QVg>krHJt};X8{^7A|0%ZC4iHGmZP+Uln|5PkR|BvytTGmZli}xQ4pP?o7yET% z1+7nG<{RA<-DG-}ItE?VL8Cq#L?KQT#mBD>u?7JkL;v;on$s=q5AzJdVUN|&o|s1* zx`L+k>)!p&0~kZ6Fd3OTp22QiK=svUmw~J_ipEx~-5`tf8a_~28Q>WwA4+=>;6()D z5+)ZB;yR4bC2(Ox5E?6qkgQcyE9)6)ix-ZZW>0t4ByckRMwSGD1u8kFVZtaldBkBP zOx(&{cD2sTep)&9SeU7Pmb&LbwEqcn2d$qvxMz4|pe~E$1f%6;FgE_wRit*WIC78D zv9hU98N~3Zf_{~1%IDCQwtxtiC_p4Od7~i5ZCovk5-qk(VDVVBu$b)IbCPSUUN?2r z*6(&hiB*iqo~4_R0umgpcQcJ%qAesKALBBa6!6;_HTW}h%B8=ofTj`P7_TWepfg5} zBWT?uRI3T@s92o?J*x1E)n`;GkKFK>QPp>;aZjz9B(d{n!;%lAMvy zi__u}WOpgQWHgQcU}KMuJO@329+!ik;_8wWLM7SC?96`g_l>TfD-=kA1-lE?#~Da* z;bd4{!yd_A72B0(CIe3VxPlV@C3&I)C=)YiXLI48MSdg}dR?#TCCR8Hf_rt(frH(e zsRfFFuy9E!-kL-1hFFdX#(F}4m5q8!Q(6qG{FJ zhO06%E$6piYn76{E@tUC!Om2QQ(ipD|KJ}bM7~s;u;qOw6+Wk3I>D>uS58D)oNEpI z3f#Z%L{~GzMW=#Oezhw=7@UOT!ye$Uc@jv+jEG?EsTdFcNo4p7jim=e<_n&N+2w?)1 zLf%rhvM;1)mALgtZyG97GG-I@@!Xw|EW^YY=LyGQ#)@wdel!q@(Zl=sWV?N+jHq$E zqd$Q*8!Vj!jiQA>@^E)CXJfE6oMJE#TXZuvao!O|T1=mdc5vz+qzkt6&$;E`Ur5jZ zkm%0niqBLsMJk5c_8Z$%->i`cr@gH@>Fj^VeAuFe(O^o>m-ewPBRmyBG+Cg5cgcW_ zV2ua*(Y5sk6`kNX#J=_sKc*}9Dq`RF`i~pDzs?V~-4n4J#qB$gGBmgBU1-}hDQfaB z`ca(mjUwLm8IDsb{=i;;)al3?i1->|QdUU7$V>VOdk}uSlEYqd)7jX-YJ%y9{PD5z zj{k(zs}y@mYMk~nGL8gE8=C&Tx}FKJO7;rJ$UD}%l3sP~QnmWVBR{pl)m4gfWNA8Nc2(I=P+WDB*- z2*}ujPM)-%p$EY0kK!bzvYD&m_WfTG$pE?v_ecCv1$3KAJx%^MFnzI3Q+vG*0*kF2 zT4hqpQxYSwN*JDsr$T8i>^_mV?xn!9fD}sNy(Fgulohdn2&N{GsB=hhcKYEXs6fSt zX6uRf%cgxQ#O$4gH)TazC^yEJdI5?IH4@V(O>7(jDR<38LAwsl9I^MM%xXPkSTaYAuUL zDg%Eicze*X>%eZg*JF`5TN7X$P-gfhh;t-axI%6}$k>sG$1>R+1?)fulx!Sp3p z{r_q2s-vRn_BJ3WA&4NLgrHIi3?kj2C=T5*#2`J)OU%$n2#AVENR7hK4MPeHB}kWa zcXtfk@f~k^zjeQN-TVK?UuUschrRbX&)NGI&+`~2&r%7G(?x2lsek^ESd{bOzIwi* zT$r3X{q8czBYdH}%HYZM9dq4Mne&ydC+(q|I z3=9lt+v}L&0|s?>^)EF&iRXLO*sc?@;NcU?;1N(}O8I&cl>3~oL2JYA(FJ(n z`CpA=Xfb(%X1M%7I^T~?mZB{m`GzCoNYx8%k@nHR4Hu#auQ69m(s!O6-yn)$S1!~& z+MfmVnLD`Oi`JdMpA=)Lb?adg_FL2Ul3v34GjCdkcfPWig4pWqQ;XC_NUxNyx9rMI zt5Rwd021DUYEw4hnqu$C8DMWA`6Dcd2zMJJ7P^ixz`Qb8u3p;0!*d#4EbQLj*)aP2^5lk?==4(U6sXM*E zK$^q$3<}$g5;E-Ne|UJ3>c+JI)%Ha{Q4e{UR}wTAdFe?YI#p*7{<8CiCPqZ%P@aW@ zRJpUGU$?OhYB~^i$iVz$j!XR$?ylEf;dB!N2P8jL#e-Zc-sd8$pf1&yfc&-y9Y>hs z(wVIw5fY;DtJ|&!n;?w4XI5ICmGmIg)N0}u0A3=bJR_p)YcSuqyvbqXH`N61BqD$7 z=TlG*tA-D+YSrbLdblZk9v4#qqp2{_7|}>RV-t_Kx*5Y({qhDmC(yVxwccAqg*f4V zXQ(B8+}daPa0I?F6UcGb{_0MrYsM!$xQoM>t@5{Ig&F<8+UNT4{%NOv$*mv$O(*SI zYjJy0FY+Izdjc301N6?F7y>qZ4k!5QEM!u)vmKx6bkDlU*VDdU&o~};ymmV%P+3v( zbo*Dsc$23`gin8)GFzKfjI6b^*C;9=2%tnjaI9oeHp?rAefmE#pZgj*wHA~+G82zt z?U_653f#8yuLN4pCnsO=F_8d=<*s?&OSb%(Rl8A%oN|@U<35ZtA@MbA*26Seo~qV-q=Hj{?yUu@dcZ}46GG

    K_Sb7bjfg01tZ;$yEQjXI)?0h!rQYu7Ys-%FHYdQ zpmt8#NPG9;G@dn_`a=mXt`Z>AO@@+0!1@Oh&ZgsQ#1m*#p)4p1ropstkTHRMRi1tK zUU0@JYa_`#U{1R@{aOBQ$r2fPWUHx+6B0t}oX71ZdAjac8>MS(kQ3# z_d`!zXc&3THZVL>OzaV8Dpb2ouG9IYhm`dw68FT&N~)%v=y|G z{Ro8Cx!bG2fxMWn3Z&);Xb;y>R7&?gmGuV^b5zpU*q2G9PMox(X>B+%g889k(2E^5 zc-GGVFn?0FaBtE!K`?`y5uCc^;kiv8aE_^kME4br^;jaqd zcTa7tSuoKJh`KLA&>n})vhEaz=p92hSHtCsXep>O0L66{n%Lt(0zqi?4o_kBnkT-Y z7VA<1*K@`tXaa9=?u=m?PfMV$CBAifC0dT`n^jla9YT%9WLKQ`t449LpchfX`k*io z8IYKPymtvGOp_jPsTUi=&-X%<&4pyux{TT8)cbWZD}9x4H)Y8o26xi6JXA6?d)5wF z8EJRktctk#X*`_&%T@!mv{3t;v3(b_!;cHgtyyFx?xOWcleVrA69`5v95ERv>ZF`S zJAY>JIOV=n<62ku5F*Vo+`2S6R?w-HEk&vRNNobte4Jik!Ioy#weOdMbk{L;*akeg z1d_94Mcvh&$q^7?SsftvtoRl*g&BqIs_w8r&2hQ!F2C@lC|%*y#c;sMi#*ct%{t=8 zJl1j@dm)>(;cWJI;6h6y)_YPa@t%%bX}n&1pLA-(>E9R@O!KYL(X_PkukEbd*jb>? zdmAVq&ZUD9xZ+}^$NT+#U;4!72cB9|=KD`|#L$lIS`~8&wm~;pImsc9j%B}@7oi)z z*T08GJ_w2AbnVaNOX}v`G}>#Aewz)kYlqq#Rat5Cr7_avKdumb^7@w%iK~ ziq5x6FtV=hqsF) zFrcH{eygnuCu46;hBTcqNTYu;e>VM=It1ma-$_RR_~>zWV+(KT{ygN&5koqI(JrrJfMqP4Rm}F8bemJQw7d*m=b0l!UR0*b zAZ#U%#T9SWzcaec=;b8r9smjw)4e_7v0eBN92-#-Ske0mV-+A$;fR(|xv1U( zJ3qSv6IsnPbIu<563J^4D|H@X@}8;Mq`vW7(AZ&AoZDp*w)>;IWDS2o4s|#>b$Q_| z{%V}4Vz0NSHtGG{Sqd}klSlW~jv(4E>~P;}qWZcvK_0suZ1*xiMlRTmc)(t=U+)Y~ z7I}oE3k-P#=5lAHS$IpNA`9CkSlet!W*)0%&ni zBwcfRfNOI=9(-k&cs~1#kd$OTk`danD2J?uKAbJHuF~#wS;XeNlTit*-s>%_1(@O{ zeazT~Wlhy{*0Ph;OVV8q#h1`i2x=PWiz<8?LkB&xa0A&$s&GL3WmK5fAorWTevW>; zu^3yE@PJm!SPOq}IuIA{bx`g=DSt&Z;S??Xqg2*m6gE+BarIoqo9?}3<$B4`U0)@9 zIA!P8%fVyB9Ed;o^A#ht%BX=*@AFR5b0BPi@+WRF^|9EP^;fPuPq3C=m!doCZ`+H_6!eW^mld>q;YmuSbd?}XCG41%NloOzOm-gOZgnt zGq1~peG%cPozboC9bvkWjqLj$J(l6i{J znt&T?5c%rMupYana~D-L81a^*8YtBB`^j2T6EZ zO8`3bQC$F=^(sJ239++^qt8CYUD}@7*~Xo0axDoj5D4;hYbivZW#WV`<3h4xzE(v=P=@ zP8Np02Dng-`-r9LJWnr;l-W%5uZTG`!TRZoFMdg>$Hg@06(t^TH(yfZ_wgffag#Y- z2gf>YD&m0VX8~ZtCA8N*dzwd)2vp;EK52>{PKtAJlf~RTT~W z@X69U8VSx9EE(p@V-N9*(B7w=Qm5RNZ?hXn^Q(5U^>^$lBr|*vvwK_SP2we-VG(S7 zj$KT*E)L{4$j^F++b5`UPR=C{hEvEiMgXVgbzBVDGH+q_(cy4Pka$$t{uDpa&e|@_ zQjX(=;UbAo;2n~<8=~`x)9uj7o)4thB0sx1PVAH1B9Ep!=Nqmk372OX zf%XBxjz8Y;J0?Fod>yGBr^vKM2@=a0@8mk`w{a zKiYY)#nckF64>KYSKu!zT^Hcy4s6O^qiXdiPE-tdG58Aw9u9X~S&HG>FF! 
za32%jrcdx^Pxsu<2{^q&{h;0PDRp_L^rc_Hll&x=g;ltNUrX6;z?t{iTr>a8 zTeJWd``hBgkCJOSQdoF>w|Gz8CA<8Re+p| z(4$~-Cn2*`gbFU%Al&tI11qdkgKK#e%l=(IvK`aQfRaCBQOhU%c+>5;J?o=cXD#znTOmnoIyBAp-n$0;R~GIw#64<5z{jU7Fb%N4TF*_S z8Cb!8PaV~W=ONIk5h5SLNUig7F4|a~ZLey#nn9;Cg+_g%uS;=Mz}oUN!`>DV(v*p& z=oryf7moSRDCpwJ<#-_oK2^`rx?llk5tKVORn!BJLJ8lX-$VGJG6z1U7DvzeoNl<% z`>I3?b4=|c+KIV5MJ3Lg=Fp;t*8mb`xw4;ht{=MW*Y2?c{|qaD-_NBc~2iPow5=-kJsqEGVH3AO>?S*eF#VR99Tl z`{>PkHicexZBp$6K42!6?k-s4Y>v$8X|&l=2$`*RQ~xXdmJ%IbwAfeH51>8e9KZ)7 z5Y9t7AF1O^3HS4NtWGn7vMhDdjh+l~VR*y1oN~pb;e(fiOkxkpJiMgz3X=0RuHg4QKgzY0M8*A#{B+Ig)-s!9?k zA6hN&$5g{YeZE;HMb?5tT>D2WT4y~JyRCr+O9uWYMNm0+@%%~#S49GASVW?}^9w5& z$Rkc%$3V^RISGFXaO6TUTNPtm)?88x+1(16Q%9=XQZb#9+YJ#NosI4dB=fgGwiEWl z1&f1wPUme=T)_h~WV?gXG%67u?5fqR6nd*7t;Y*oASHCkyE2C(Ss=p!1XI6~9p>^Z z!7?hYnm&c>b>-VvQGS^+oj5MFnnE4Zi#W?4jqs5E8+R-FO_aDuB-(O#LfzroEmm6vxn~#-P`w4YGUUTH9hua0_rb z;keG^GVHoes>r7f;f;Ez{6}OEC?TgX=H`PJV~!`NhTQR;WRhp}xCqTtg*q&`@Oby# z)626xL#H0f2ULrFvQ`sxISqd>cf` zdtSgdRw) z>p#U6SW_Al_Qc>6qvs(K8ucdKiH`n&_KaZVi@ViexQ~Mr{SnYaS8N%+z}tSstT~Ta zCiET8f}SEalYr}R+W%W!TOl(F3XAfIZ0rzLEnTcShm&qdpKTeM5m{0*{G@vja+~j` z)?;alM=`ErAwG1Wj}lhDpkzR;u&~_>X|E=|&~iybq1J!_?qvS1b^Ax>SfU`>^KlP( zo}OdB0#E-E8wxLHgv4U>raT|lw}*4}Wubfd6VFm@gwjXI&OQYoD+ z3Ljr%tN(F1^0_3N*YnkBCRUQxfcz;SD9zQ;ne)j;e<0t-E8lWyUY$a<#`Dy)B`5DB z-u;C#|5Uu&Y)`?>JYS`u%>Az?!36q56@Yzk?85VJ69PGH(5IbUYUK-kp^CVMNZ&eXrnZ`Yp<-dse)>%MH z5NEA~J13$O;CY=}puhDALbTTxR-E5SB50hV!gL)wDK-!}gF?0iOdk7Fh;=(@>?;}$ zwl%Vm>G!2~v>ZoUuN}sz##*cX626eeNs;4x{`&NHTA(3@g&Vs7gai_vtyO6%Uqc zF_-Ul!bEX_JBeqCj2Tm*HWfBx^ZlZ)fAVn*bJKd{d(OP~R;Z*9#pRO zrshrt095i?y*$ar{?^lgea;$B%SQenF-*b%P|C04Rvy*L=W=#8m##KFED>1d!Ja+V zD=TR2^{X^?t^&&cL&dMJ8RS2mqWf-@%`Er)i-%A%lPkL9Np&NwGBtc$^30aj8AO-R zo47P@I68H|uc|?Q?w^q)igI-Lo;b&Q?ts4`yW?HL-O)0<9>G2s5_Kvi$^HJm zK?B=Ag6BLJ`4hJNX~uP952FpdP3mFK-_6wXw%x>6z~j6b|ILXuZg|x$v2+PYL+QTe zG@}HRjhr(yYypuKuRiv2&Ocr^D|f2UsQb!XgrlSMveIt*qDP%F(7g|o5KbEDK<9$N zrPcK>hPO*q`$Mx1(j7fseD{dEd(fyF>+(7=ipmqE?Y`Z7WsZF*n;FQDeSQTwFH}Xv z3#@cx8(MxgM%k58S`!ik5E`rJAaWZsb(1{Htr*CcLMDz=a}6{GfZ!5518=NZB@p$I zoHV`H4+Mqolb&^%{rax=MX~)ht7K*b)vo%RJHmXjTaO~%TqU1$w~FG6W%!Z?G_p9Q zRs9?rT3Db83WU9yt;ZosBDx#Ud5fx*f6?7v zeQxRU4xDuQ+$#JQY5p!%d7NYCn=09XG9$pXV+)W)BLsRy_}hhD9xJ}{62l2zv_{Xa z+yF`Kv`BpO>}m4lmH=eg#y_l$eqo!EV*a>(wx?qTB_GyqBK>;xL!+i)#Q3K>{ANVv z;F$&JOFWDUIYLb|;$_j7rK}^xhY1y+)~wH1jbQHCqjJ3|Mqqt0)gW2(qn~4bpKQg>n09W+dxW>)9MH zhci!tR#O;ki$61`E*{2Cme8bkcJBobay#;9#|&B=OZJ8zH122_!2kwzX}u?qjV!V) zwmCO8H+sMSJPo#b0ZULXoS9usfan-su|*;Vo8~r?iU5K<@`2^q`z+p4Y;uq36c)K= zsDO%Q9XOQ3WqeDfL`||vr^#NC0IG*S$7o+x7+;uHH0cGaovYV~XWyU`|B(Tfuvzok zX>BgO;9Q$%tjYm-d4LpxM!hSHsl&vamen*2K`NzxKV|?mm9(3`;zaS6B8!-^6_H0_ z5ecJOMI!lPm~p^UhQV`5{QYPlbxGW>!1h<7d_z>17>H$a%l85%KpG{xG#(?iT)+fej_2x~a&mhJ-5zS0vm{6e@ zT4g0sK+J#rc0VBGVn_7ezUF)Xi;Td>)EtwP2Yn}pUj)~DO;-EGF+*vn5DE&(J<3a` zNv{$t&l(WYy3K!S?We@>TkBW(&=J%yH+KGjbBsW}&k}&fiEuxQdnu|H0L1gKhwz$+o~1 zb73vw%1BNp>Eb6sH_0SlKQM~Op=xQ>dIf*v1im3oR%oCx4!+nomjpcwJ$jaCOx)5N zOaGz~=nU`})<17F?1R)bk%LO3i%(bft_gZwdY%RlB9HTO4;}ZLG-<`ndk{br_s79< z|HE#q{iVaz7<>0Z{b~AL85Ui$8*k^$Rq%Wb7heVfIjLtXYhoA-UCSuO5Ta%@)r2!JVf4oD^m z>>uoNIX9$bIxfZe#oATWsvLY2$DILfPm~bph#&CH@sYrY+ZIuF%iTc}X-%a2GL`g- z%b!h~b3cXMx4tcx)4N{Wr!rGGre8GAQz@50O(y2!!-YVLSR*~ru@dedOO`Q?Kx+)C z3lK7vnG||o$fXS9k;GrDij&C3xDUn1sF0wgFOy7qQy+Hp((f-1<>%|aO0x$?xD;{o zs;ff<;>4;sEnx#?4##IjoT!gAkd+P;U8ihf3FQZBw_}|-NIfzeftlOURw*lkFQ5*j zz(cxYzdoARIAheO#L&>`!1thif)GGUor@o&) ze6i^Ywc5$8k8F-oGG(5zvaMd}cn9+e*Gp%>kB$-*v> z&hKo$jQj9X#z#BVleYgpN`K8AD3S01o~dO1n^kS`ZTTm40Jqg=$!Je#Z6`U}zA@N(+ zt_tKpue=obOHKQ45uZ$=GiCSuI6%E40wgVKa!bm(yi3gz!DCTQMNSzO>cTD9Xlpj= 
zFiNZ*w?QelH6UnGFyS(mPE`qO)#C{Q&Z$tox~x_bzPY$}KLBx0=Cke4c0XI}MAHW< zEZ7ko6wo7Aw9u|g2zZee#<4^fUq{fAaHko*z!`Mxy})(w5kju`ip&}10zNZc;<*ky z^qownh{g@^iD)uh$XwlEICKC|BYy+F%^|$FoNh~X3(ARNO;@Za9Xt;$KP{au$D*_Z zzFzZ0G@I0}T;cM#asO9wKn+ZwPXW?-64yZPizNOTd#J_|aRyfLD+kV4rExsqS2jJ& zzJ@$fG`*2*00{JZSwY18biNn>1nXOeHJ2%vk=-;Z@1;2H+L9q-I&R~$wFXW=YQUla z;k7oH?QGg4ivQQ%fWNJy04u(-O?1E$QFbyx^P4Pvly791ZaUDXdr&r&H;3z#(E?{D zBMGnp#|Oum353QGWIMk?CQs*yG1p3F@yn1jZTJE>f`V8kQHy|Txa#~s<6Hmo)!Gj? z>-|b?$D-+JhGY}dNq&`@YwH4nJ`I$i{0DM|2!W3xEOFtgnCoO~Oho>?Qn7;XXYH>; zP#?)*&s`pfxlE15@qJ@r5duIfiwwR2?0~MX2P3lxgb9(bWh!=77BVDNtHp-7sGZVK{S{lpZwG)sxm1(#B_g{^R(*LM zSj!&!2ar%WH0h@25Jsu`-u}zJk*Gs*#O0h0en3zz5J?8z+`65W=l4l^}oE|a}t&?xcb?#Jtzr1?r6{3I$}ejz0~9+DX|qACvEWG z-3^Z*_}4a`g2LOm@qew`@3Z-Bx``^lKyv!7DVF|6tN+_w1jGRF1sfBzNdKdr_xJD8 zZ}0Bnf_aNzxxI6 zSIm-k{?``t&)%Q^V*qMX0K5kdciCM3TX@q)|G3J0z5z`T_+ztGk9EvSHKiCqTt&)@vX0M*!iFR#(Tp4Hy*tF6WZe!$OF Kt}q;CW@c!Zv0+ZbNy7{iW@cti!<;5*n3ybo0u&Df@}mpz5&*^jcU=PX6A1WU?Z5%TEI=Us?jsK@ zKR)rm>jUOrCB(N-5NO~X8t@9v1N|Ek_&g8ne;}b#^1%P64v_}z2O_K@DlH8xRg4`0 z09z+>J7-Q?0b*bSti6<`69@emBC;O`hFR=d6%tS`=R~Kh1eliU?MG{dvM*s;YBQqm2 znE)IK2??L0i7BtLn8e@Uz&n02b7yCJUM40tH#bH%Hby%~GbR=u9v&uURwh}*Ls#x*pub8+S;Bl|$~@6W&J1h`xLFD6^3 zzo!M9Ak)VaCKg6!rhktO1m*i^e~{>A9OT7jGvfa7EO zx6}mS)YUUYfr2Ek5L5UHEI)MiqmTuDefn1ktb_J#aS$BJfPe^rNQ()7bq77sg>*-k zLnq4GR*Mu(*gpICFcSY&R1L0Wi~MR5qB;9X_TF7TtjD>a^DX5Io@q{TtBZ%5Q|o#= z8~T}_tbZ^H2?PwNi2r|4{1hf3Kpp2DQDh|*f zqUTGJ{x8vy3JtKH{&$o6;6^)GXA>c{sPGxJ};&VPKBeW zV8KEcBq&@d;Ey^;MCPTc{~WLn1ME z2tsN=2xceX9irdutF4~)1xO_bDG4bGsfc?x@#l;r5#fK52@~Ll8gP?n7xa95{=PkH zF%>LSLbmyhgm(2fd+7w{KUN=UP|x{BH>vl-1?dd#t;mP;g_y}Mu1*gOtg|zyKJ-J9 z#IOVBPCz_^_+Lx-?=|la2|L5 zgHwFiT|;to*gbWk%&mfHi%cAQ`r^LLv^x1T(zFCw-{({84l_SzxG}0G#XP74Mq!L_ zOv*spwNxhk-uv^ew#Ou4m_7K4DjbDD5V0lUTdpk(H&ACBYl80%@2n<0`;Va z6=;^Uhr+-7)1erge-L_3CwpBF0CsKKn}ZFk?YtL2SU-~q)Mw+#GX`W#MCfXXW)1uR z*J2YJ?|!;M_ZD~u*~l%-Ajt257Bxdk=o2jpR&vpnP{thvpox%FkfN{{iG|~`LA}FU z85dsy`O(uI0fIXIzTY-e0UMAf;vc@3C-QCigTVjMB9ap_p#jH5-OqR)i$EW?SJNHZ z#^!!ni>SxHo|kC{j~hF6ODa5;(Zr15XMFK?1KuI{29lrjDvWN5K0@J|rpt@hf(bJa zO9=lVrVksV{ox9=^-1xFKbQ@i<;t;W!H50&H%bZ=5+fc8#UcS1(3y`8c&W^}V;srn z3~=bzQdwP6rJs0`E4dDY?9Cp%Xb>1Ey(O#Zhk$vC2 zC+Rg_;ItF-e*tGJbU}*32o@Zd213SN7#7ZQ1)(pPkrJUX4Tg7D(pDDV5!? 
zzQv-qE>4s_sp<4(_q>M5(>!s%+%6MHeZRlvf9PBR@Ai90tCng7F5HW0auk7Y=wh+j zxP9W$=qDN|wFD<9D$v0|Q}ltBiAgT@pPE_@>zqVvTqu6QZg;b+fTB1p&(cV3*Zs>)8@9VXT4X zL;EZt;&o!T7Z;DN0vMzQ91Ri*Jb{Gy14oyA{tp)m+`)hc3mpRY7LJF8#Ps}5Hhb{= zK|fA6ZQ13zEWTGYzAT+{@pIN-lpF9PupBsqp&+T#rNJ2p@)mD-qBIizDK<< zVqygdb*@=Ppbc510ZTB#gm!Va&R`_4%bnq% zaQ)_lCdb1bg-$duL%e|apITH#JhFDEczmQNM<3igk`V&`Yp3w{yh&z(n0RJW{h=<9 zKd@&rbRFEXTj@qfB3~i~jK!}CE~2Ug8MxAFL_cg=zxWRtR4~^M zX}dP0v19(v`v2fQSi}c@ZL)JPSi)+F-K&i}Nf(CGN&9zWBN^sB21VMhVNRE;35us1 z$_KW?cp9`uGmp43707d!y1sWDU!RMoM4>@hBM5n0RH}Q!fEaqi7#d|^Uf@6E>{xzs zZ~%=xrN2wKKc0(CCK*fMn607^3?jPuXU5F_beRL_MW88UQrKk;L{^{;| zBYV2V=#M8=Z?$!x~SAmKPozQf8iqTuelNxD)OQA?cXfzT1 zlw=YN7(Sj>{G=-qjz$xBi&qcD)4_ znTXHd>wdL&{FsLOb1_)z4P!i~9NtI*+vkB8e69g?gPTJK`@0Fvb-RT85W^mSUs8e4 z{;;^7LHJbnZjYp zZ`4CB&r8r6o=2!U*WS=)Oai@Oc7i=iEWCv>IOt`2vT!cfg&<)gH{GrpD+uS^Prct) zm^Nv(7i1LH`&z^7NIW{s6`AG0Ms^60Ba57ZC!~!tv*yRx;tZ!<2HDLAts}mRZ_QGT zE9IY()!f!$TeobMBGu@mi2ZCc$K$BN)^L6yZCUr|t(P%#40IiwIl}zmj4!nL_-yyn zX${kbLIrO(*urT}hoN`x>kTxUx$R36RnP_t#?03a6oOl7C z_dZ5JeMys1j=S%&|MN#(!|#>h#POLVR8}2d@`}&kA&~hyxc1x)Qlx8Uq zKdT%h5Lt!MU$Mgh)y?__%U#-3pi!qNxLHP@g|vz_6e&8<#pXz(?cBRU)S~3CIBQmK zs32vVBqQd@ftI3jn6^ZQh{qR;$mT>s?@m#=6fY7QMb(z|LZo zCg~@2+91Xv+cN=XlM|vG{hVa0@v%e*qyT``RBw)`oBQ4$V=Li03J0)-_zz%OupzTmip*02;MCgu^z(tB=9a>>v9@)!MPI1*R_MD@~2R(gMl0 zkB{Nwi1_#AwqA5}enX1aldenbNXTuydhG!uH@Z5%yGIxS)iUqI{(8T%#`=$A<7f&) zrf7Cs<6KK6ly!(k?3@Lg_fr+hI7@*9STjp9sf7-7eND`&2Yjea_vpBZ3=woyT#0!T z9{>s&tXkkvWSsJZ=Qvb^ZA?%&aFw) zWR4XAW|8f&H(nHu$gLl#O8fd^Z;(chXV=I-P1aeWK8Z)6 zp=x2f1r&+@7J0BA;#$jA=QG@GD(^s~cQdjFM4c*uj>E)DTaV*FN=D2bH?tW%(%7{b zXAZ@a2)wdgG;>39)bg2KN*}UWetaGUZ|bI zIzj%^?g1&S@eqQ^*--&`B%8eT5iVdtx9g72`KUf&L9a6pced;se>{T{hnH}VUO3?9 zu-^Oe%VLK`)SS#rqm@~SVr|FV-h0#LYU{P(^jT*|`j=!{kpo7;m<;Rn3d|1Q*No!Z zlO=wQh6vF>4FS&^9N8T9ABQ`!QL(B$w=)Nv-g4*&Ve<0y0L6EA=hi?`tLm5gIAKV6vGlY1Gp^5puff*Adk=B5@6MvQJDGIGs&J zJI7%1IQ-k36h@^@`zs@hOm__h!at$LvbIVM)M-9gv(8LDlbhg~kl+uJ#NE;%4$e?x zC?%?UoZ19vNR;E0h{ptGT#suVaPge^aU_KA#cO(KZ-jZ8SdHYNvv$H<$=g+>Tn&D@ z9SebI zYBq$+;F6b%JiHi@4q{IR;uZePFF`{-LFm`i%b=_vpI32~L{cwf3Ms0rd3sn2^a&Ytd4(;od$}C z{hZJHLCWPj8if?dWCm9NeL9n$(IpU2Y`uEy1EN_f!5Aki;G0*ZQg zMbeo=`l({{&p{Ml(NAJDB$&IsAHnX|h=q7l8&LQ?Zxja?mZxlzHHI#aErcUmQETbr zVX}|SHge;A4h6_<)MiajlsNe@q|k{KMVEFau{E6TV6EHp3A{1Mvsc)Wo$3GR_p9#C za5$s7Uv7P-IBT6WM-;T`pOfTM9 z!u>6)=!xXSQ9!4!*k{mAQwK-?O#YZPXhM$%SSmt4(75#{;koGA@2>ML@m!kh1wne( zQ=NhGD~Ps;V9QL&$i5H*Ilqr1!U6my*6jQx5dezMQ433>69#@<84jZ&(gss$`4Sew zsE-Tbdv%)M`dXemz8=2#zCl%UYHi$9XvKs1#ek#_yjj73 zuLAchprI2+kYp(gA{bO~z-YUEGn<6=U(V)=VId^lYa1A86o7a1rxPiKN&1K(62K4dp^|jh$7r7MR_OI=^{Q5|S z`F_O51%<V(vvn3} z=Ki@_MhEId{Tdx`b6?L1jKCmF?R%o`W)p2$(dakWlxF?#{v|I|y^>J`${xf}cwozv z=6m!bylep>R%uHjPPVM{|<^OKmtv^s+t)M-Ft$(~lbvwZ0NORdtt8+5FMNQ-1qp`_?C~mD^dp8jL>d z;6E|?l8NN|c!|$vARtN9^#YMV9vUWATOhp74!bBgi8{yG2zp0=#MQZe#S7fe1k@WN zNab>KQpBRmY0p;ee78Y+ehrJeSbe7KxbT2PC71ATIRUiDXqknhQPwe@9Ts2yxyIhy zdPMMjeVRNP2t;7C0D+aLcThPNPnaeE8Sbsu&S8iVh{U-TsbL%B{c zA$IN?#g~zT@|j>c>J{4Bjsp@lKtDa>0N_69oA~cuH&>>1?@4q3!s`!N%R+O}~fc_I(0%=l6q_hcy!U{xY@sjf(Vzq4wnMf>NjLXz&d2e93!T z2#GVrxkA@totsgmsK!CojFAs)y7>G-2!wl zZrT>2s^Rm?n;Q#sk$r@SrPn?Dtj-4<+YO)IuDDAQzq2H&1tD96XVDG^-$mqDmJCso zMXy874Q*K56mdA5YSj5*uXPX3sm~SF&F9@${7GEua9vD$ca4SwJE!gI_ED$ZOeb7u zEDUK2Jd57?R?o znPD^SEkd>1;#+Ljr5(0wP&rLLD@eYB$kr>Ao$zr7BRPau`)maA7$J(+PY<=|_S0*G z-g*y0z`bFM;7)_ZjL48I?qYrL`vDk`hq(eiP|bE5p_yEEz0Ypvq=2%d_>jk3MvWT( z!?CjC2y|-Di09AE=l<>O0TaEqD8K6f@NXUD+uNS+Z{R}ji!Gt&i#Xf$qM)PSD1){K zqiW0F>_STH{yZJcS#dk=escHl=p9e3Zb@l_*A8lsbt3Q)%Jdwn3DtOeyvEk)bi`!T 
zZv|uW(d#oSLOmI2*h{2VfTEO1hM=atF5>3rUzPC!$cQb+d6h}RfV5#aHE6IYR}-N6|lJQ+~rtas7815Nm3Gec>lb?#cp`P3J`aT zrWDqGPm>WT=|!SSOC&yd4+v`3?+MUeUsY%9GL|M{!ZR^B*HDzBTWUY>99@)#!JcWyQ9ap7}+7sVtNfdLv>=c5w8##ToaWt?hN zHTX6Ww4msILdxM`Jlv;fw{F4F^}`9Z1hAb$4L4$C7psAUFn{I74tP2InXM_^<2BHg zIsHiVIC!=N?V0Safs|k{Tcv$*7DZfPc>MV7lnZ0E*yWn`-bzV78o&C1&A0EB68K&_ z{I=fTubxXnQ=XX7Ib)TU%imwiZ)Il$+-Y`8W-s!yU_QU!{NCZRkM@aEs1fw$%3$yy zD9L3CIWuGqz-?3HzCYv`#yUT)U3$+dB`gqaorq|~M>w^j(SEc+*%_u2pg!rmBtWY5 z`fa=)M6~nJ@$EB!@6*5;x_PH)O3#UCtFKfDaL*EdBuMcDbe8KnaZOGEXr+~cH$@Av zKYYPHFMRG~6&(s5dlQ+NSAwSN?k-wh8IwB9@RKrMEl7aMU|eBW_de9;p_7-CEUvJjwL{d#L9a}8!D-RzSrjNWC)?xNSTfZ&CHj-yqg~wrq2U1I8z}1*h%lU`fZw&w z66U%h9_bIk{1CJ09%;qu#!UTT^An@}4_nVW*s?2r_?-Ig6-5ysPsT(#{Lo3`NAG|v zF0a;gt4WWR={JktU*6bbb?SMsyxgDEDcEI=-XAW(T~8b!o4v+lKyi;Rw)#Z}>jp{G zW9c1Py)+*&o6fjFi1fw;SjJ3%@slf&i2TQxZve7pn{lWblR=4x*XL%X^fnGUVDH-} z!nb8t`*1ipyjg#-XcH{QyFOKW{<4=DwI3ohE#p!v&~XS__xU=LWqTwtQ~=^Ab`QQH zZ~Atk&){a$&wgn_vIE@W+VeW+Ykmls&F9x#2NtXn{_1|c26#*`AS^}9Kni= zCOFx2Cy3B!Rc}`pJAJSr#LBW1;rs@d=0>VvE>A_w!@1HzEj6T(*Ren0(=vA=B+LRS z2_1GbHiXtod+*L>%6DW|D`idbl*UGlpVKw;c&ceOLH!X|qPv|~jKu;%X7EIAzi7^R zlGRuYtKtY45>wBLXSC$&AKpf3gWb_VL>y!dQePbQ@p@)>J`UU!4hg= zDeaK?7rL#b$ZRc77L!Hj&-`!=c8Ssw%K4q>PH;cy%Bijy|EYS$2$ni*&l#b>?hj=> zo-hVFbWKW?0JQFt&2P0W2c4A!a9UCKQNi5Nz>EPnmwl06fffco{q{I`=Ea9)d3xZX znz~x`P7*Yxl0Rcd<8iF6HpuO3k`*WT+iX-7Xko*STFexeI3R4Ed&L|z&NUzrE(_0D zs=Dh?x%_Hodvce^9V;odmdE3;g^5|prRTC;g_!-FGU++~I5kyja9GnD&-|UGmL?7q zR-Xc%T0V2^8JJUI3WJD6f%&P9Im+;4u%5~>on({b?I&?DHdtoW!%2tVU z+-RgeTu*+!=#Yy2o=Q>PD7pNpqjfJgksW^TyOe#@fUwS2_CS1l~WwB^hIyORQ&E== z6*60@PhlneKs1`LJSSX1ky9v5bML`5a*jZm4jPWm*u`7<4yQtz-&?X0Vf5zdFE?4n z+odO5^0LGjj<>UO+XO6HGvU3zwws7sc#9-%3{iXLigW48Uz)`lP>`|^oU1a4e`QC-G9ds3mOh}?43Z?IYz27 zQiqMFGQvGxnW!h4kYP(3soW+s$ffhal1D~I;QxZLq;Rv?_&T?r5CQ1q6kkh`I})lZ zj1a@vy)=*5V>_to+1e?3PrybbX^mc9nh&2IU#MAxNMJk~Gp07u%mZWoJ4mOH#_=7g z!B+Wad?{>p`whew)!ac9{JBxYC^L3t85kp6=`R=*>2jkNzR7uZ&A%;ER*R&>^3h-= zNC$C*oBf=qQ6XpNCzY%N@rFU}%1^Jix7Kxx=Z}{@OXb?JGsP8N@09nyqJ}(<$*RGQ zvORl=#8yP6fT3lD(8{u{EW`_78VJSJNdV&GZPqOsl`;r=9bm{_cXNpRDJm%+C*K~$ z1GQxktd0qnmo?{FQX=-E8Rca&H!kq=E8U(agiKjTx-vR!#{uiP89iWHSqT^XlNTa3 z4jAbgLrseP+6p?&E?XHRWW)>A&M}%tyy4Cc$tyEx<0TF{9b*VB?;{rq!LAjUi=xp|;Pb z2)Pbt+H&qSL4rG31O$=vydw*E;Vc`sRC?ioK z(MXKZ-kBL03@#6GWr9%$s{uSCkLOs1GD?hwPs~0~viAJWi$>RSX`_y_HHam7DOX9O zL-*DyHhI&>4m%R~35WaP0IuftpU>T0MHyGAMY@9Y28e1JNXSgG>AVrI5+TE%QhVkb z#priDC$f0(k%+h{+J1M!$@#5;38xuG4P4!XwrclPZQ)hfQW_+&qbr4!jBRTK%U*s_I0 zC?@U!ZNBNGXL(4URNiV*fsP7%Skx}~HEKAC{w>PlxyOE*zuanEJ> zNxm$Vt!KARmXVCq>!5Y(9Z_Y|ym7HMTFq(xD}MrKn;Iw108+#V8M;wR&T?Z?1YkCp zfqg%ov6Q~qYPx9kQZd@Nxsn#iy#^Y2lb-JlWSQ-^>d0|=V>FCVLM$+HD~CGn7u5Jd znEag7zwK)s@w305JnT7XTQ%Wfg55oi(eP9AMzfJbS`Sk8ViZKMM$`Vfw}ZKdUy48j zwAxKg|J^haZ86$?)76Uqj`%qC;7~grq5QAR;Yd!%uXL24OJSa;A!2R7m_M3N*jM7f z4*0WHD0M+AJZ{kfg3GThwvXOHXz;6BlMDU{<5QRUJwXDR+@KYlMd&?EiLA;v9EHX# zXiu*f1Sf?4$`41^)$vwER~_`~cHh{DD$33)sA70e5Ji#s)pxEfi~NxuV~{t?(v>%l z<2JJ5eTQLey|8P|ZuX@I>WEl5Gl-G~t^$npemVt<9Db!sg&RU1nKsic{iEl8^_6F@ z!OsOCmB20$|6O>XjwR_KKFx1YJ|}#6h#E?L-;m$vk%Ns%%P?Q$#rQG#G)k1#^a-jl z%}wVrIHEl{kuwCxfw|rSbRFv(icM-(;Bn5FU{BgYwjI(@@I?ZFPQ$ynGiO;Z~GvpQ&He^TfOMmQDE#eY_{$8`WaN}CbGlb!C3UbRZi6&K!%M@mqLfM#A>!lakU;aoCZ6^ zWT`m}ahW!6D{PDP^u)l`BM_3;RR2>eB?2%JM>Gi<4>5cx;B^K*tdOlPZAmfVR`mLaBp5@!BIqPZ`iL{{0 z9h&VK-Z-V)HpGu8?7-boG`7i~Ts4N{M5fEX@(^VP>Od#+s8VVH6bC6&mIyy0TYIQo z&qZPu_KuT$Vjqe5sYBf=_Iofu>PLg$b;HLdTxsWmJWo}V{#vbnzJ(W2bxVIQ4dD3P$z$9hwlXKtN}<5x)Ra)YRBT^MLSw!pD`vr|{T|FQ zHDjRvAwbM?=IOEJ@jzu_)wQ2G5@vgt+4;-jJ#c|zI45@n4ZuG?M7BmFJ zkW`+n?o#MI6Z&|X{|@o5ZnWI~rnLpblZGJLh>b=YmNe!L-s-UZJe322s#x^VIkgzL 
z8ZUKO`4`UfhA+CAYsV|1rTL59X#euXyFpeDSD@8;uix^?`6y4;(q(`@?k=hT_>{1kF0r0ANo#eShr5Fpth^kX5@nPr|zs7#cC}FT#CY7Pd{5LMVuc2w4Pxo_QdcB$9 z`YL2c)rDJ3f>Z(-uEj=v(^)8DraRBeNUk8z5AGMJu#v4!P9h>P2VUP1&upc|QH?wB zS0>X#JBVL6{RpEFOs!n!#bkT)%43 zyYkG_(nb_8DVwP|QnMvWqTkiQ_r&GGfI4oqP!Ys&q5xxAu~6F2Uba0LRIXm#E8toq zG66e2kdxR%{Kf=lnf!_cz<*eDKU^jX@o1MfVAeSGKe&~?;Qa2VBB zV)+u!ha3w#7ac{+Vi&~HF_o+$jf_Ht= z5?oh19e@yI)o;eZ+RGb8hL_1c1-k~CLE`2O;*m(orc%Uu9Y%dmNDOV|pg0{EO06*xPkUChXC z{-?NLf(%7hgZ&?ItG6syd(i+Qg^Hq#Mv0m1rLZ$Lvclo|M%q+L7H+ng1@lnXo3j<# z(V*w8*R16e@yA@Q^$PxU4J_ls;dugNb}_>$Z-R7>+mngl^Ln=pl7;ZsBYx zpK=69z0@F^?sM=7saOC%gc^wFtJ|yiceGZFh`1!wcBy(2k|g;;=*@FdPa_)0ES-}V z^knGY8Rm<+3f!e2kf{nIiNO_*Ti`xl1Qb0wOti#3S$C9a$@HAF5gCX?;Bt^P=lC0* zl0xacdrmYs@fN4qzin9o{VhHzr-&Z2;EX(8;xHLgg)X=V{m$w|J7=S?ylvwaIl1;_ z4y~2Pj?ew3AEhcz*W+uTIU#46gHDj%M5M6ozUg-F_;k(ojd9f^J=}?r*0^dN^_~(i ztaf1x%~&?UT}4#Z*Sqyv;VUpm*>_j2U^tcVe8dldT*sHGMc(tQXn=yZut}vYYAnAj z0O^6f>V#P3>qr#=mD+ClH9|LwI3r(RVnSk?dk_41P0+H~n$@@7jA~?iH*bH<=avwQ zd2$DFVM93R-ld@La77 z+jpLrUJ`b`z+CYGLtfU{Z+v_EVtqa{_eJEhF0dJ2MYQp3=mkgLZOCsI zqzpPv&_zsYZ%e21v~zb}MX|&d+yuSt6h%MaC#NGh){pY>2}+8=c_Hb*eK^C!|*MIT@Z_RTHW~EhF)=GeAdRo^p|BK?f z5ng>MWsD|8azgmz;kWSTrI@#2jJ2TQ>eg<3u6yjCxfXCy&ekYCyZLu4Uo0f7&$SkE z!WOK%AZ*&{n1251V9BV#Vm0n%nbWyW>s8G&libUdPfyAol67ykQ_}dlwa`96?OcV1 z?x8`&d%(aC`HrZh=SY&!Ze)vI;7Oe>QBK0Z(ywKtX&z!sh$|ZXTq`Q!ft$|2?#hpY z8maZ`ia-(mdEo4fwlwZGrIH6a`#4Vm(}_h-?I==h!UgFj_wb8Dzp&Ez-Ka%y_rJ3G&d5ncD& zJ2n%eXD01^ikHnOoH&jJ|8E1J40e+^vdOjBMz{w}o^yC}`n9fACE1zXLcX5{UZ9F+ zE=!}{lFQZ0f2f_VwPRl=Y6lKq_9)+4hKj=EE6$ZAe;1Y9>k70esf^Yd|0IRgWU&$% zb}35b0$E#OS_hIbAThJ}iQmgL40tB)y5@x?$l`&BuY|AM@acqyt@T~IHCooz_$f5T zN>#t@3Wph)qkTMuAxKztDoX&~I4#1A)I4@6Db6eb7|P-IM4(d07)wmh(5rV%Zh|3K z)ECN}Kb^ffS*q_%t1XD`a+1n-{j*A#!_U@1u~z3oE*4YvaH>p~3A4$cFMd5wn^O`X z;Id4C30)Bgc)G`nv+p^;igt*POB%aBvbJ`fsZ)M^z}PdAP%xJ;nsKbv=XKm9 z_x56_kiiYf$Hxc4Q98~{yrx+X*t2<+({&nPeQJ_Wxe$rpJYOpREC@pLTfp}Prc$d( zb+US)Tpc>2*_^5_=+v?t*l|Mack>NJe|>{mrV^FyuV(TLW2~#)TWtHYb`>!mVfG47 zERI-EZ`ja`=0Kek4uwldM~uY@K{; z(LS5-eHWF9_P_Yc+cHn<> zb5P?~+OhWD@5?bYgw{Vx#djEGq)M^on@g9^Pe3z$TdB)3rce%h@?E*8|#1VByHl1l>%saOr-HOz_3UAV= zfJB(vcAAiT490v`kE?OPl{LSbmra)eLawCP4C^Qu7>#iX8h6+@BGcDdp>GF~d3%^2 z5~V1HN&&s;rh|_ym=){i|ARI&CT29T4&g%s|4s4Oxczs7@kDAZS8m;WbVGDd$DNkZ zqh0O1ujDWK`yqL8?f~jIVWehl!y3H+fn^6K3rIQkH~=)J;!ld}C1F5JWL^`kWy3Il zswrXQOqDUn6Q-}k04EwENn_r3+J5Y6z32I8xHVMNSIY{sXyUk_9AegFI+=`!o#_OD za?u!eVycX7WU6jB&mY9Kz zizD0iH$y=YD~sS%bGo1z>7iBYUJ*6lGOrz&hR)|7BY~XSd4u}Qews) zl^sUG(+YR2WN0%O91AH;Is5~nV-!sbDs~!O5~Td7%_5IZ!s|LqBx)5nprMy5_vaTk zOYl{vQCbS0r1FPif4uaJ2HQjpHMR~_9YP$-=hUW2Djm0|FU*rg=_@%s?WRZZMQ2QT zIMOn40Ac{C4O5EUu7CnhoL0wE&J!N{q+)EQZ^)6DYI?3CI|&hcurpZpr&jFkYmGtg zsYx3?K8C+<#5?$Bd52ol60cLM`g{ASJy2)AO0a6tt+){eX1Za-fRUvj9zsCZzVsTC zi5V>)*X1GaWk1_qQ7l0?cB+kkwNbu_SS`k96V@P!L4BlYF2I)P+oYViTk<+=`jV8d z#+2bEkuYk*5TVg<<*XWYw;s}~Q;%~@vK2W%Hx|`yOp+4uT&Z(^Tu;+s-A*JM}$POKIEhe4Ajzn#3V&tLG z9r-hEiw@rMhWCJAD$%54m(}rs+N<_-R5ofl%?|;qlCQshnS%*oW|($2OZIBp^K5NV zsR~IDM>CcF8U@94Jmjv;dcDJUQbz}WrGzYMb9vsPHi>$C;OePTJx_IkgHLxpC*>G9 zw+5SP>MMIEW6RyXZ^^GR$>`nAIir$4s{w>WBT1wQ8F$T~>Gbm%&Z#RBGfU?4g zQ;zuap{EfVLp%lhSF1eNpb@P6!eF%wGdkiirj;X9jWVe;ke!~lo8o8dozU;M%1EE{ z&b6ZpMzltyxaZ4P;){2N(^v{fB&SX+f$1P{QF`)U%O!>?$%~@Iqc$se@2l zTY-Zo9%t0&?OJoRS$PBDz+*Viz+-jMBRYBkKC`8|`lrRZ&rOP*o~j9eOH0JsXsY2~ zH!UbCZ@w&X=*|3Z=z|75J-&*C;g{cuI8@+P+YgXBJ@LAxwI&^vFh*A@wK4CJ)8@a6 zdik~>_XFL>K5TD4j7X+V;nwZ-!L(wx9yy0J(LFGAWVrYfHM9fNT3*&_3GXqpZ9jak zQoE`58|C;cX@yoJ!S>+2fZij@j~!yc%!hAY9>-vO zx>T46-aXur70J=T8mo-j?!3MPN4jWZCU9*VpBuLnVMtohimy!@4+z&Hg*l1{%_~ia 
zI@ITc2$ebDqbo{ZkP9i1Wl`G}hwbhVw5(qYlN(YwBon+RxQ_gJF*Ki5g#)J1-uoPzZB`bYNwHy%_*yTjt&oy@&QzQ2B zdbFMi#own#cW@h41~m4{IfB>yCl?G~^g>>G+7zf%uiaXNY-e)%KuJ?>yCmH=LAr@W zE_&p_Tt{%kGn;FT4TUNJRs%qbzk4nq;G{#S*E_-+mj(FY(P1mOyn24ijMY2N=v*2C zI=_;^M^3au7xN30JD~P>bmCevx2~C;tE74NNt&$esh-IFmBabVx^ta)!;ly0Zr3wu zeu70fP9BDdDAJAdPi?-U)Dmn)Fd4_$_#{|^3QXc8^!o{2Bd(QF>EZq8(D_N8qUmrr zz&^kzpls`#Pi9GmEwuwf$x0QvI?-JBGCfns&t=%(D%o+kJVn_PV!@V(gHj_#Z!E1a zT^tJMhnmrC!oXJiJYNDitL!SDj!Pq*}z1l=`2xxlxj~Ptp^H7?!wiP%N>6^>3eO>$r7J$~U(~mWQaQ*7to*{7> zXr@>0SN$bU)h?>!EEJAoq^W@ETnS|@IuUfOc@iwZ&xPBeQ}xZ0L2CQws%GijQO|+8 zdE{pEda=25`>jamI5;DZ>nW?%25zcYJdN~BaqPXpThtluDP?o|xUqpcYkA!y4!5<| zCc-3|st%F?{s=D>tkr(=<#y#IS;gNp44pO}pph7x|9-QB(G{=s>NmcGuH@u!$_zvEM(U zdLq(=y4<(Wh+{*5{}b@9{{lcD7Wh`YHQaQX+3I1DK$6%NK$-$P2licMm@%Ixjm3$U zE4PA512ulwzo&Ru`n^H;2c7M8t zTyHk5nz`C(7A6g*%V0oefLKdkMw;;QvgFjXu-@ZjXUFY#)=}0P3otpoZ?;>n<7q%q zeZ=HwQgY(s?-kcP-)?DV$^JamoW#r}a9jij)R48>x zOOIT_S?{MiI`LrBjnT>}XYm_aX5V^bpv zB4`TLSohtdnKiLRGU??Q3W9sg+5vwXaL@%4>bX*&2 z^>Xn9R7|2fP&g`y;>hWk$sYDhXh??b){;kA+nZ_q{6oE=wo_%Jljp1jiA_;Nj@VJr z_aQPP%s2JddmRcz^sNa2TSN>aqM{->9!J5c=0MJ`Omft5Jd-y} zqgF#R^%mcQ7}L9=g2|&6q4y~sIWs8cWZ=Q79`7_$&Arhvi&NUU_C+UUma zr?+5D=<}W;__!GSXY}!DnEl$@E1?s?eIu_5Q(w<>8ftUbH+8K4kGr>wimOW&KocZb z@ZcT@kf6a`f+fM--DzBdTYwNOKyY`5#@#~D#@!{*xND$kcqen`o0)gl+%@y#{d#|T zb@w@E@2c9ht9I#$`}x-2X3{LrJ>6>X#dR1q)zinIcS?vJHywHF?us;`^>(>vhxAuv z5|_I}q&eDqy%1jFTVz7FZvlyD+u!ICC+c`o=Vyn)VzP|3+X#gC#qXN}@_D|#Hd*L7 zASu^tjoQ(@2t@KZv9}v0)NirDzqyGFS$z9Au7HOBn<=Iv&Ewb!x+lp@whqM3Wj{k- zyDJsw9r3r-ByzpjG7}3ow|zcBdt=;@7$R!kv-=$BbN>A$Hb#q?*Z3FTKwR(X zkF%9lkMVpbm-_JL3yZg+HLr6V)}EFQnoU;<{~T7v@UQz}dzC9CYXsxzMOQN3uKl#0 z{a_QRqT3dbwn(K-G|&AwUAyfCq`-(>R5G%O_=Now#vB{xl~MookP;E)5fXn~ z^@$|r!_3q0GI@7(HAOnl1Rx? zu0KT(Hta}a^AulcE}CW?-+F^5N9(?{JD?N&Z8)l7oa6m`ct06g!%cp)SF1}pU6nRoiwD-%uPM8+ zGaHTE5F{wSkry1jZfw+3$K2EdLPvv2vZRwX1?+g^aX*s6)YrE=dQYL=9jiA3!Oah{ zGnPfg2prXs;a0PL>xl<89pqw@lxRLe`N0~DX4N5U&njYe19bLrE~+i4zSiV8)rZ<* zMmZsyX9cy@T2$tiH$28+tZdBZDK47jH5G&8MWqQIUj}WLwGt?xa&z{Fe17|qwBGgf zL58t&t3#Re?ydBB108Lkvdcoc6;R=lGoe@ijW8{<3*Ca1tk)s3`}y?-N>=u;iHGv@ zXpTv3oGY7Lf~TsRjk!9>Z~kP2X4^$;&5|(|Frgxa#F)nvWo4VF_6*9%&gYgKathjT z#20=ov6Y36K=G?>BeC#)GAX>_op^?$wj71t`hP@^hoBPtR22NmuG`Y z3TzX7I6<09d$BIHL*9Z2QDF3Cw$pu4^LCAz4jn<^?Mv~R`B>Kt)Q>9K$4F?~VKGA8 z7ke(BPR9{fWq-PvY@0UAUwF0sPK^@Wm@rkOj2@6fw;bF>VBJ&Uq#8b>X7zsBYQo1@~D;Nlb5ce z(Tg}fe2L;;@IEEh>+p&oiiJx++{TA_Q(@9Hz)3Jtj?&#q8>uk5(t2R}b7GYsc7~V3 zRtbeJLE!7W=jO=#B*Z4#t}Z*r7<~S>Xmb`TEwGl4dL|D}+hlwWqc8M0->OG%yg~co z7aZ#Upb6R2F^|+<`47xV8 zW`mg?J~^CVhfOfVgW$!_8XqfTAc?Y>-^!DcSbF602SL%9S&9}Vj~|;{s#|i=Ce3HA z61-FJD#~#LUp3}IBy{{IuZ=R`ZGKubCaN(~Yf2fno5?Hl?)yOB<8xyJF-iKQYD$}d zu_5M~7X*h_>Q^filNml(H zng)oc5qEzAZG=>kq>*Y+xOutDuiqcE+Px}TEamE_qEl?E4DwPt%e$ukCg~=?%7|8z zKj*(jbw2%U^iGR95f|$S!l~ zUCERBL?xsg=k0C~qtG>uqJ&EqXptc3649efXEw-o8~JccqOUidzSO;CZXtEc2t+G+ zT~zi8`%>=EM8BeQG>OwD&~%t_JnhyX^M?;Rikew`-)x))AlZ;!1b2NG?e+P>o9-_? 
zb3coB=CzgEz~%y+42gIgWf+TDvGjqX%BO9o0w7$5Msiu>*Dya74I zrFm%_xj6Km8gM;+9XIIXqDvYHuZJgtHXY6d2pvDC%*2(Eb}lati}kuOH0bhz^ya(d ztn*$`ss^DvRhH~X)aq-Cp>-A;X2fMj-4W^(>u#2-rgI^mpPe=$$)A4d!FM-)>f1TZ zO}nK)8a0PD&_CFpl`|jY{j>ycd+4ih9o>U=(lG5rF`@Tj(8bmw5&(AIlQfj6!ZKkAPqgR~jy(`&wXM|$G=0ks zUR^ehw*AKN-0Y7Rk-9oW zC!X;=`kD}TS3a9DofQzz!R}XJ(z(G8(+p#^izvZF2Er#ieF#~dzu)yg^885DpZiop zO-3c;hq>@Ia?|=f)|!woPVU2*@R#u7igXSbduG+E#JAOtUozNTHk>kJQp+%xSOIAK zd|_B?Piu)X)^FdDrN}V*3@i>5`aJl-w^2qT$%#li0dw`dt&YC6*YR7r@9!<{7!3=H z^jI@&=IS=*9^~C|5TZkw{Z%>`hd~GfNN4Su6%QNT27+(@as(_WmiG@A|>@Kd=n}&@J6ND2f zR_RorUj#3Y8j{%kAl&Zp+?#|7k#GmhmFpLXH00BOLl9;BgGu?j8eqsC)S%Lr_L44D!r>B_;RCz1Eax@Q`npR4Q^^TKyRLV z4RrKEJKiyoYkV>Hq0w+nFWVm@HayIuXUBWasAJOnW7@QIRvT*jLCsE0mTyOvg6gFd zJNY?fEsW>+gXuLX+H~}*sS*)7*xSy$&6LLps!qRJqETIbw7|_4{?gpo+1c6>?&McY zn}`~TeLxM36}k^|kMzKjc0CO%r=tA_rCPH3uOS0hbz9v)ydx!i8Y#DU!VQSRkDFh; z;y&vC_3R$H2V@|QJH($eN02(|R!qN8%;HPj$C%cc9GXQ#AV@dD8u(N(|C5%%x(xc>DH{d9zGud%fyg( z7aOX^N*=D~zkd8tI20OE_W`z&eIgBo;eR_zJ%3yIx1(6wPF)zuWgo@(3DfG_+4@hQ zImP;6&s@OgCXGvyYpFvCl zh1m(meHl;DOUho=aOd~oro&OsM%XMZ))FqUdA1Qp|Z(W|t|E zL1TN$&-4Q&h9%u!7mh@p58e>{B(=Ctt7$aXDNYvQ8M9Q+zHpc7j$lZ><@?bWY6`Z* z^;U3*1{#*onC9*I%QA27MWaXE6hFkzjl4!&;CyY!cl4e2*bB9%=*=Us`s)lldMpht zqk1R;9)k&q?-PW>j%_sB1_SjRra`V{dhX$(zN;_L1i98*YR*Tmq?F$&3+2==K3a~i zOPVw-e}*rs@$k5s8NK=f#X)OZ^A9Oi{XVrnL3~zI0($KjS68RNe$-_E8olV4h`dfG z4EaP%rNa8waFi{%`IsXm{>d!C`QZu?-_Zje*yp44JGAb%a7KkBahhqr1arWx6R#6m2h)OP6_ zeigbN(obH(VFaMYQpZrb>d1`QGIg{`&LeA@sKC$L=FT)FM`{eqr$UgH64CDor}j2H6R~I5a!&1NvM@&SRw3olESqxCgRRwgfpFw ztYZP`*KXMM86`%jsS`AfN_2j;+yFLr3LW3~_x1o{?M^Fpr zgDG8%vK%hN*>@rR!rkJSc<`e(k9{XPi$K-(hr(fjwe}Ol-m>8*Np)x`*Va13oK5I# zOrXF`;ycqy&9#JLV>#z>-gnw4_7_A`lu;Li-Z4X;(Ex%`uZAs)x1p4q{YDvHE^z+TuIs;tPu2*NCXq|qq zQ6|&V=UudrpvZLvZ@)+frD*w8nTOQHvvVadW=Q9Jk4Ooxtl zLn1@9ScKD+5$?X0JVTiJB#DKRe*7^=HDj*Oju5{9vbT?m@BCP-yA0Czi#NZ#OD(I< zPY`V7D8l zJRVomcD|kFH;=^aH$w|})q1S@%=2q+G6V{q!m_$QZ?a7{@r<(?B4{Z=P7UfOTKd7q zce=&vco^3oj-VssZvFE|vy3%;3=?VI``=a&UHnI&d&TGBbej;{5ByxakH03$6xXls z+dn>-JWJG`D&Z59x1%n&`~p2n5;nSYcUoL{Dv&gSniAI+Xz9@TW3(*OaqHS;Ra0Er zaoEP-TiBWDeupmjdRT%orvI+nEBbBTj|$5ZV{UKd7>jyk>DfW#gc^pOjrO^Aqg{($ zI;raC;YK!__d@WC8!9D}VFSv(m|*xDF(HeWFOUb%^=-)+eap;(y>2r+=VwLU^&nj@ zU}$z<6`w1S@;lQps+UvKDP|(AvcJ3jl|Z(BHJ#S;*)MQHl>O@=C!gsZSGrad9zTbW zF;WQ=Z7DKtl(|HzR-ecOojXvcll;NKp~(x*LoFU1)RDW?P+ZlKK24iw_`5CW6%%jC z#Xu|zK7*V8!#x^hB%i3a!GiyMVWu@`I9`eeJDM%R#RBrYM1>ytgs5tLS{s@j$q1xR zbG)s@xwRvSty(_WX4S!TSnb%Eu~B2e(B>@I*}wn-{((TipV-N2;pZuy-X*Ty9FSV> zlrDI^s>bc{HaZo8X|o+PJ)F;umk854VLz8&C`uM>^6(^{uO)w`j`qAd^0Qm&GOe0Q ztNheErA99-i(_F@PMLeu(CDQ+o2$Q4&E*LhMufEFWje=Sea$!3`RoVYS+^tRoH_k9 z_@X)zF%eBJlD7N)@|w}Uk96~$QeKk^+|%(kl}PcO%Xo$kkuh{Z_&Y^@icr|C13YhH zu}t*?vGDspSsaLbQcPn?7tTBHvE=u+k#}C4ZLC+fvU`Nrcu0!QS}~hyb7e%)*M&Ua zQ+;|4hF#P0guW^pD+NvG8v7*+HdC?@YDJmT!7viDhL*bE?7p^k1B1iS-W+%y8|PC2 zg+N~&0>C@b)7RhxcQ%(Dz3yk2F{|5ErB8fY0SjOOTNRxDA_VGy!d2-aFcjxELjH8e z+Q1*eaO2AG^D@M~zQ@NpeyfScA3-*d?j%vhQL*J3KnjInx0Y zDZfA&6os-~iYoW+ss>4#pRB_9JR@&ktI&mcUL^GB<_hVUSxq*je9R7a zZ5!GyiXG47u?tNf>9LX~9zg^3;*~JrLox*10*2Hhv3ZTWYX(ioVauMiw+{(0XO~ut zA9B2fidk<7CJR&zm~}{ltr&{Gn9~{XI=pRDB02(%czK*mU=AfP(bmiy?5l%}Rs>H$PuKqVFhFyN z@ZinmIu5e|$0lcOPkITvhn${gpa>*SA!b|%->pmMC>OF$IUBQJOtN|Zhpd!zw>4o@hQ&drKbnbtt=KO z3)Yh*N}utMI(&fKkC$56#H;#7jeVMm?(Xe(n7Q`_lDR|5hc>k2rsGhG!+(EL;m*72 za&EHF4kLb$k)|5iEhCXSAktMVdv17_NALCKdN_fbMAu{I_kA{B15H%{zukLX22@AZ zK-}dECjkfL@K_*Uyz34t#BI&oP1l>m0?z+F5Z<1uEj#ltOG{M0IZ2!7UD=8-g}t)b zd*zcr)p>*WQBLW2M~j{PC&z63hnek=3}Ya*A^A{oT#lmysoys~>4?m`S~`Fh>QA-y_p^nmzaDG$Ix->;5NSIjL+^{0T z!{AySV3E4S`3nd8+2D^3(r>vpSTmQc+$c6%cMarN_9i6Q*?N|&$hb?(CBMYugG%nZ 
zC!7a8XVAkPxdTINp^f4PHr=3)Ze44r+NV$6ecZOx!GWlfZ6QqW5}8<2eRa-ptCwsD zoox!LF%)0bPAxk{wjFxG69<0Zmx>R9y`2l?!6q2;W0yB|G8jDwrphPv$G`lvaInHD z-tZI4h7*WbZ6m$CQ$IF*8JlX4j?ur1y8*?4f_VwKq?k?bY6BOjKQKdmk0E2CQ9go2 zX|5PRHD|9$soCF}Dsh&ay&^J{NMoV2(L=)PkNi1!!f@iVaTz}bzFyS*bgk@(%{nx{ zqz>eSdH%if^O2=m$Zu3#t6AH#dNIeEes}gw*+NRgfj7B=zM4X6B-J;n(ga!ahPHxX z|0u17&nS2AD76KNU}0}-l$C`|NCx*(dA=-Rgr@K^6{)d*y#pye!xdA*C_38a;Wm8QXjAq`3c}m9x-HGr6e6%%XEcX$#fc>X zWla#SB2j;h>zsx*A@>xm4);y0!$l8C7S_59u6Pi zvagpvRck(>liU_PaG5GO^9wFI zrW^ZRqpildkrkAWY8g3r>x)C`Lzc?ieig40t1-3XwU+4nYR%-HFC|}d{|dmr>B$yN;qTyaJ?IQ60vxMC`2mYij*SYt zC)haT%abHEmdaSZ#vQiDdb$Qb&raXL2>aN)B@MLN77?o>B^_4;Fq^b(s-GP#a+V=N zhY%hJ(aug!KlcvbR8se`>yKp}mTS8hJ$4}GzUj0HuhXnDkcil>h#WF9+pin^Cej}9 zcv&*F0WLd-nGC&fr;`4(*1Xww03AsRO9`VKc&`9`(O_i@bgy9t6$YZp)b{GPxCRXz zf=Pz@W2kZrf$-a&)wGCyn_1vTkuaocANU<6F<+M$U#`FDr3*8L2u8i#)Keu4-7j(h zo2dpCNc5>zgmE+yUdbFY*&)o6SC#Sf3W28m(Pay}Y^@&&pd^I&6OpcsmBQ2ae{YmG z(?nU#OvNZ>cK}HKcR*jBX|G+~X2Z2WX&0a-`=}|ARUb(q{6&R|?G_}7?cONh^~IXM z>Ku7>xjr`Cf=nd&vlLDyHVsE%EDF~U5>AVf3$CKKRfOn4Vp_C`XT5$iB>hi6U~1R> zJlDMLOm|t!ctaD3)cs7FE?=HlwIWmm{~}r_+lD$vV**0LO=Cj`Jq>1L1c^p)RTvtx zx&J0cL#aq7)5ALFtXZV&{hne%m+1Xnek|PU-UDII_~Ml!aUC@DV;?nPA(v16*3(1@ z6&I;Ag#A<@-y4EAoE)-NA5BQObeTcZvnej<4SLr&>W9e466y*JD_Tb zCGeA}nG|jO0YI$Zzk8&;nZQ7LOWPe#CHo{?Vu6DkZ})vZM#>TwHg62uFXP)g|LbE! zp|3WWo4<*40>-`ZU!&*d&g0rT9+`A|9btA?qRtDx{K^=__>};cjM_w8s;d2^5%TMo zUnh0nqwYD5@jB7h##am;TXtsUEy=aJtF+?g?1^s6dw(l1)vo|wYS#3h(+;!3>y=?v+kN}c74{MF6lg%#``@dKI@Wfw$W`z z-qL`bukKQB`rOPy$fU*3vC#(aUJB`h&I1RZ6Dkt8=io_8UHZeazLSdw19;ZgQsGvj z@S#RqQ`NnCjWf{xn-TvvxuD~!_uz6vVwCGN`)xCpy$pfP1=D;`QQTuOQp-w8?c;Y{ zX+KMfU1N18^hBP2(tlWHHfIWJ zm%1^%u2EIa`?=`jtxAf3W%JaO7PifAZ?Pf^=e!inN#W^w-&`zJ_|_w#w($2O-@C=t z`<^{IPUhW^u;YE7AMbD2cqA$eIQPgF%tA3Kd=4|6?N8zqL-+l?+#Wq3vuQ9ZX2rBj zZ|(<*2KT}}W#Q$^;@O5s?u~1aN=MxTxzDdwC5&26_<7mJE_EJs{2tt5r(`u_K#U75 z>Mnc!za|QkEI#0vWfe)Vbl#4JDL5FdtbxUSZ^v-k%{FoiYODsSG$szJTIMgahrzRk zDR&Lr`L*_X!n;0M0&dBDD3qQ(5RM*L)xINUpDBL1k;zzUEo&vVP6G z$4(VJ8V79cmLm_-J0e_lGu~FK<>9R#(Pk-Gnhs+K3U^L+6snA1W|*VcM{WU^LfK&# z5GR-xcy1&`w_MB*S~}^ERD2fe>!lmm=(Ya~E3v-uYO~O?=Go-xYX6DNO@;;KW4z2T=}PrHOy;x+Z{?}%3%zcv=mM{Xd#rL&nu zbI*n3>KJN0CX8;yRkMT@t10KeXB!*viU2)tJkW3clX$ZBqEv&kw0zw$+|dW6?n^IU>o=#YWy^(YQnCJ$)?4_T-c)LL25wntShMN@ukd9X07(tB^dIyl|qp*h#(_aX#8252_2<~z1px7az>0eio_ zEqA*SMwJ0?ah{MO8X&PujLyX^#k?LQ*sD&@%3>?J7VW7V@|rPdlpp58!xmDyYrj6} zd`#?~!@jx|3rL(=o#}jN>0(Hp3Glh>!Tz|}*U|3c!}@DsfO|@F*>#FV;lx#Vx!M$E zp&n9M<2tCK-Eps2TjWXtKV{sTwd1vv$u`ms8+9KmsyN9)lt0n8+eE+EKt6(}rKKHA z=P?}{0Zp^~;+E`Nd-y9)QYc}U-$pY8fV9L;-5o)cks&zkNqqawDr8YT)+HDjqSM^D zGvQlxoGVc>2KTE}#P+kh*h-?m^>OU6F?#;+vB5|iO(ShDxMkHuTKnyzW-XBexE$tx z%P@5(Ro|NaEJ1*w^dV@TOab)GH3Lnm9inStKN8>XdtAF@n7wrzQWU~nyejm-m1Q(k z6!&7@5k+WqY2vg`(4jDK!t!cC{;_(y&n;_{O=x^mXKX`XcOYwft^JsC%91@{GQ;Wx_^#C%B9`k|T8Spus zG+6K*KG!xXWcG&jtD7%str}_jhk4va6iDoXV3+NsC&I!!-rY83jRhmcTr8@FMs4Muev;%oqUtUC`wlMwkmaxaMWGCG5aRxNRIchv!SR5?4aU* zJj_Go$59Qpf9tet6}=ni?x8~_#!%QLMCd_&?*{w>c5_97D;>OUIJfl? 
zGzDt{)h=#JaBoiqR=!dKFH^U-n5|ZH%D5Bol+lW6(>D5nCp~sLb&urOJ3j4FiX1JQ zopT$zEBosSK5{W^NA z#2Zp?@R(&jl_bBYLhp8#TfbK;b@3}78;y@zxSd?Sw=66mQCWh?PmNit7>V>Y^Dt>T zpMTAdB6%sbV~ka|%Z)DYiTSl1LU^&)9_l758f!*1r}%S^!SgV4?D|3SQ$-{V7{1bY z9W=5WXZJ3qLgW1*W;{G2KA~NmCvA0fM80_M81t~omigK<*gOMUyC>Pr!eA3&O18-< z?^)rF!UwSa>Vd<^0=;g7nRXH2x*kIx>e(wG-}T~OayE}A`Jv-o$qmvum)$cYWNrxZ z67Hv9UUJ(?<}ow-?&meNS_Md1;{on$ceXobjFIt`vvfo4Dk_N4LPsa_1sqDnsn-$e zMXJ(ZJu{BZdbC;XwPL#+GUuqc6E*D_;TFAa6cIfD_UV-#n#tFD89Qei%n`bzW?!g8*O~=l>TN*qHJQeQz zN-cvN>(O@F>qeh9w-2>F?x$us2<&(iTOQo}`T*us4>B*9-3WI}{^dxBxo_VLr7v+1 z$%Z;iG@I2E)p<{NS}xUDZ?2kS=9TxnUW&llDen=0>AnkTdAzSxsq8!s<;XF6d?*?S zfUr81PG|Ab9+PR)2e_i&w>mHG73p->z;@J&dFI{J=(J`9GX3tfJzb<1R%5F=1bX|I z1#KIBeQ(M_lQ${7r{|O4b#9&uZ*k!{LM6 z3+8XAPx&ZlbJS!VE>7OS(i;1-u2*L^a#Vd&+Kr;D*o-{_sCF~!J=QP^QJjTEA9t=o zQF>q}uRMlN*OsIX*4pJIZXK@oi|6}Mv?LjAIJfVy6;26>0RC0G}iKL zfr)oVV63IByGEn*>4E=)wf_A?YV@ z`ckdLfH$hUL9wMF?tx>c^a^jba5*-`t#Jx=QIo&>?#Y~j-=WLxxQ`-6!|sJY^xPo# zsZS|$HCE!xAlWiXP3vP_JG5}KHN~i8Yt173ytdo^gppIFM_$!e)XIjc){xsa(;Z49 zLd(2c?YJ7|PLXg%YCo?-Viz&XeRRq?dqWC2&~tNi;nZ}3ft}F~ypJFd^Si17ss>ev z%w2taci4BIPd!-`w@!?t*Ng3mh{zDH*Z0?R7nT0sb!^4Y8tEzpf z5v7&YCri4lMMkZMSG(_SO;tN?l!ok<0Aa-|4L*#e(Yg+h^o%#+AIerEY7jHW45V;J z#Ba;)G~(}0J8bmK2{P|wc(p$I2?>JsX^E=@9d(x+BA|G_=%&rFSpXRtw)S~UUQl2= zvFuObw^LlILNAtlBvgH^r9pu?R@fn;Zjol}-5&AmyS|)*`VocKqBVRZjMgU%NDcNF zV< zW5ky{_%_i`;ZaWRIqkn(q;;@SCyue|d((anPj8;(Uo^cttR1VFrM(4h4>v-u<_vE3 z_5}8GP|`ONj#{|%W>*(ob`qmQji>|+Zbzy07ROt2=GQ2%^33LU z4q!QHdlP=g&6W|VozxiQUPP*!ftO8*{t&d|^L zN1(L>pLu!E<3mCBB{ESugLkT?zpUrz)iVg2e9BfMN>eDLs)J|7a^RC)@b&O)=2?<~ zSM}P(eLW{B4DXeey(j#6kq0O{vu73NhC)P&GupgK?0^e`Et!*T`Ej!CFBR@odrVx| z3)y>s_*Cb1Fxq;z=4C{HPU-q~N7&O5SDS0IL;jR+FfCQAlczbTGbs*6P{3J_=h&c1 zeTvG(R;#LY&@lMN9pqYUboEp$37>%cfp& zUfx{?Ch{tjq6AQhP`GWf&GgO2vODFFvQ3tmvuO-AZ> zctD-+h?)1*Xne-)@4ZR(_eH3%2QILcv)i7}^06nl$MYImk97l@wl9L0N?aVYtyCo& z6dhyBZOIEk_MJ+hb;GJz53Jb9#@+2>Ym~yqmnpE{&mC;%vWBbfXwpY{+mDuiw6#WG zrI-q&SJ9rJA~H#5F|Ss|Tf00aDxa{pb{J$}q!Pft4fqxvzY24+-dNSQU_2bUTUfXM z-hpzp6ERA1WN>NOR{}dWlc&HbMpWWEk?_bk(~EJNCmr)fb7c{1i@0jU9ae!{3Sh*K<|{i?O{$@8*rXiZM3@b04=k#zoEL0WW3SIC z6$b5K!uY$}sENWHND#_zj1Iu*gyj4F55nSM8gP%j83GZVmO^=$1UOx5EO=H@u}fI8 z0n1e6a{qCF)q?LbBw}vBn~LRo<0`DTdA{b>T53$25axlj zxE)A&Q#FF-6o5eZ@zli>YucA8DGQ`XZj0Z?sTQ9jEk;tGJ@0mt47)m*OLOF$O0{hE zg=OShjC9eCtM#?-yA1H;Rd88P&RepYIOx{Y)>iB!Scb-qvbzu-Tud4d=ha&gS7~T# zFHC}sC9Ibk+k|T+EDcSm zQA~|0N(hg_L1Yq^-74^|u1OfWx_?tY{wpo4#&&|&jSIsi<;Hnf33^rTm zm?sFp8TA?z)v`O#irW)ch3?8NHm!!8LOuJ??%C6pN)nbL)eA3NOwsbrpL2UvJoYT$ zP>tom$e^iGW4q4wso?smQmQ2|x1omZvW2u6B>tLSZRW|V^c_-pUikQr$@+y^w=3Ul zi5LAg6XTau7w~ssioC%;Q*J+jWCdN;1h!aYU6Fs;Rj7{cIiB=xlQeR-AHgv#EBj|{ z-{yTyxk-PvuCL1t?r@O287;H7yMs&0kj@)*RC#boEO}TZZd@u|m(uNzJv&M$S);n# ziHK$|Jc)G_`DVOk2(V_WZJU89MMK|#NRWO%)`^L4)mcmQDu{^F^~%pe1Uf1|1udFc z+VbzT1e&^_xHwWBj5j5cw}TJ|*;NIZ*>g@03tr{vwm^P5>+iU(cIxXdEPRGqIHDa; z`tgXn8&}`HEPEFL&qQ;D09;2BWRCs*J1<&d-t&)c#%OdyIP=EbJmMI*%nY-^GywbU zh=Xz&vM)HPyzlVMw*{^ZjPHXj2VU)5T^?Hq$_1)nCib-Xex(Tz3&Kz<0F)J#RDy#6 z_%wTH^GU!H1XMNUr+z#?PrCaBY@^mPkUHWhR(tOZHr+D1JceS;@WFtGYca#I+c`V5 zrc-jgaco#GNN`x@Rh-j@GWr!P|7I0k5&sz;PBz;Dp1_EH?Eo5Z9^ylfA_f#5BU1b3 zz{A#XuR~^TSB-o}$4~dZm9zicxE=fZs&Kpuv3?MguhXeXPY>C3ba&3;o|?+9;k|`_ z+d#AX(k!NVC*=|Jnx=msb45oGrX8*qV8rjtIV$!f@HZL`bN^T>6EYFC5bAM{G7^qh z_8aumS{gq^;aOwQ{W0BXf(XgHR5W{!Oh2duz3{9??&<)Cxo95W;O+{lqV_H2yN7z7 z2JJ%qTztafbt#{%ctH?DS1$>I;8RKj`7bSED=ltxc}6PJ2LjX+oaiUr&U_y~s9E$X($ZPF?A-@_n06hsTGn4Y@zpKhx%J@uaG%)r z>!TeaQB2ko38O!#_G)H}2j9hI!h%S;gH9~Z5_W7*^8>_m+)AeIT9Q`>TI6+2bwm?r z1H^1F07S1Z3rL!_N493!vj{Y-P-=)JM9E@;>H7~oFRN8)Wf0LouO(XtHrwT>3JTB2BOL+ 
zcyAK|0&4zuvHDb&SM9gWZ`&DypvmVd{0Ohv#K7t2F?`ncj_MYbdJn&D6ub?|$5lyZ zDxkKtK>*uPD_tS|Ucl+JQ~;BUf+{};Xa*Q9FvV>xAunk_YWRcW&shbi_@r%SDwg?i z8wqWb%nRq7C*0|E6oz<}k)kY64^hF4scHx_rRwp(TbyKd9u!rQuRK-iGx}Zzh(R%? zZ5@-RiyQsr$lig;RSn+AP(zVX6ICB@U4nrX`U60K*!!nJ&~K72#HcM0kbrkSRkVH8 ze7H_FtALlDlPX0s@mekDF1oiCl3PkYe(&VKFq2SFzGN!w`-}@vnMy-~K@xf;8;k5$hD zGd?QDd!M0C7c2LmCjjcA5$lL?=zGw}2_CC=&b;)KKn?`bH*+>VLHRRQYTH_M9yrW0 zy5H|`igf@4)ek&f8vnvQK=f&lx&NC(32G7G(wS2pDKt2ZADc>YlN6z`{V_7l{m?DSJo`DRf-^Q#LHyGRZT`Y2eA9CfTvad7s-$5_fX5oflK2}L z*s*6>xGc<_Vm+all8@j&OB>D#FyF4?mus4D^T15ua33p*KkNNx(Q&YWsXj5fU?SZE z`vm6kA*Eeo4Ekz}eib}#@%vy_pH;pob7&Q%N;qlGp2bYdmQ4=No)gI(PlPL#JTU;v zzoPv+{7Mb03;~@67|;n=$!Jc#Bnj$XV3Ac11Dyr`qOcf0@DC)g?=`CA2Vj*yehb%& z8f|WSsIrc$zb^Z1Wf7d%UT`~F#4wybl%wQ23YBl}jkA7uVQ1-hsALfCl}-c3zZx8` zJV#e6(~#evtuTFM(Y1bJ=}jFSpsbCe3S1o)LsDo6SZ=` zQ@M;rbsj5hA$p9r{Qk1a#u|s-9BG+@+%`PlE+aGY?ekUj>G;4^wt?al@r<n4~$}(I`57yH0YJe>q`bwn+#od> zVAK2^rR=X5KD94pyo9TZzcM+(P=6Y(A24~u2QOJ7wX=6SD^X&-{2M#}&j+xfE_I^S zBF|B&#T?965&wy;{ZlWNR&FgB>T86JOO06#K>9yuJ0kxz4FFbYx|oG~<%<`Dn$587 zP+T!;=Rh6qGTFR4x0j-^od2Y-iq87{PT~-XY>mntWivk#p7?q)E9Mql)*Z9#H4?=5 ztupFApa+J1NsztsHx|s^M;ljd-WPT`CjRUC7s-H3NUW7IVc-DrkE0UXIXSSc?CIBk zWeH`XasH%>^N9SeHbNqY7%#ZmmnR5=3_yPp?h^BDmC6i*VE%;old}nvzfhq*MiAIJ z&-WyQ#+xsk_ZRxSUhJSfwp$NE&HpMEWn(>A@}cE&PtTBbxypKgKcW3b9(+h(O!gN{ z|H4WCvQ~etb^$EA5EhL2*F^v8@;`oZ%c)TO)e8U^t?Eg4jpk>&|MV1aClw%xsjztd zN$UUeTN|)D3~(C&h5nzw{-bhy`R4!gjX0<1ILzKsfbRHDL*kqQXP)Z~qkl5#|M^H4 zli2^q#Qpy%LEop{K+Y*dy#Jep66<;eQ1yoq!GByRvFviO#Wnv&yDP2LYSXY?D*Z#v z)+Udo)iI`jm2H1N1^w%``=eKE(DDlkZkNB=7gcE27RrIk3lKsE1FvDz!_WSUrzcTA z^_aD8TtC#wr}3neH`Nu@wA&M1+AqOYsy4nmC0SDZ7vMW19fJAhXWGptYifXg++)NE~~r0 z_W{7pAf*dKkC&imWZL;JxQ4nKxs9f#vEEClB0IYaJrOaaH}Kk<$@IUFk<{EsZN|0u z%u9_#3g5^TNLob<{u@L!yU1R`K-N0O#yMk3^T+Ry2JA8aGlc?6Y1gsRr@nsunr_6u zOwGUStRD7VQ0o7bMt@z1Q|+RlMrlFJGFo7jbo}2M=ie@?n%z^(O!za%gBEHUyabH> zA9?pZMW9%F($v?82wF;XK{u@?{Qs-~0+kR%x@%e;n}ok(M0|b4*7+WN802*3?m_V9 ztjv#8s#!%}Z=;s9xW}De=xHwg7e4<>P5fd}Qc_~%Z*GpuGNhK;zIkN+vzzOXY?2RY zh1(N>HxSa++ss62)cYL_zMf|#Ya|$)y*C+TTclBN(CsWx^|wrI?!6?nR$*&z;i}M; z;IL80I4C@#9DW)Mb+j2=Ol1I$K^rd$o+F@sm&jJ|k}t`MHHhZqNB%#E!kE>y&7vI_ zAFtaH&yNAzbV{3TT+@kNVrDiS+lNW^n?LVYtA-ijKxlgISIzG~xVa$3G=jWXqG2DG zI(%FA(#btVvQjdbkVAYvz7AQFUDgRgoi5eZC!MK^r>@BPV`u+k&;9e;w;=>F9($g= z?}S>7TAb|-YCw{mT;#W*#&ort*<$gPd>ghXsjjSmltRt(^YalDE$8x{difKmg*+LI zN$1#CmUmlLgOcBZF96d5LUJ|{MX1KJTF%CDVeLmx^7^+Ms`wXzK+$;%W*RdxW&-5x zxHSP$5%L6 zq17b}L9GVG{KguVZYG2?g6uhGpY~?be=F$JKXe^e=oQ(AzHq4e=#i;+^AVh`Hx@A) z$^ho)R7D>N)6Ja#rK}EZq`;U66sn+Kf}wv-%d=1C!B#^^i~HF?)zieyPg; z@25~R8fn2)F_UJl&~K!aFk(3M2F#~GDlB=X#sI*8nz+=dRL*OuvzaJgzS<4PnBD$p zfP3*H38mUSifbXyJR`eEnw+_6sgBz!1UPsH&ZE^?sj#^xsB-?>=}^_Q1N}8B>W|ws zjw0WHzCGtv0H;4!4ChIdFl_1DwH< zAB~aT0G^9xXLz%~za78i4PcCtD<&H8^Jn5$MCwuNbe{|1GuM7MH0o69KF6wUD!Z|5 z!PY*w)MAzRfra#oUJOjmYs32a-!ON8SOQ=wDMa_n14E95Ye)lzE_t&y^XL~oEvy+- zK`kr-v9QvxO~A};2CT6yi}b*MH_8))9Kd#zAba%=EeI<43B8Vn5I9&?D`9qy)^Smw z>e5+sJO7r?p##hxS%CC9ant`VbM2oS&mK6rHcJ1h#AjwWuFWwgz^4XY@?}^zO0N~2 zNOwj3?Fld}+9{3|;3WDtoAov0AIe}S{6!gHD(70Qba5~_hWfr_b{ueoWkSn@{Hvk! 
z4{W~u0)`$u3qVVZ-{hYd8UXO2<&Eei`GZDO_GoUdiFS)?iJP>vbZDJJrz(=A>Wim_ z0JMhK4gX7Q{{1Jl05IT$Q=}UUFa%-Y!w^8|N@mZs13YG!*H2&2?YSs)zpS#pvK4l` zvXwJ|-Pew($Lz&Vq9~{cD7}Ls zy-Dxz3IZxhCrB5hNC|{cLZ~7tB|zw*C5R9L1PBmFfKc{ieQU4pTl@1J>-;!>&N%xA zj10}lL*_l_U9S7O?s@WZA~3=wrT1T4`Hh)f+7f0DVV`#d4VZZ+`lU58kx%ulMYgrv zciwA=-!8Cf_#qn75plAgF&uwJm0Q~sU>D!~y^o_(cDcdgY#+%V#h?1`sKbA)(4kAb z{93g!0uND}s$B_Qo@b3X{mM+t;mUPyKv<0Ay^8oH020c!Cyrd;1J$iOpN8WfhW>Hg zHy|8;KIr2l^KXrI>Jl*8MV3oz) zIR5b+?zp?atmj0#erv_QRO^mbo`kem);{4)ZyI$9FV_g-2=r#>@b$(x7-jTr0*rfO zCf9@Q45&Q5PaMRijl?3`bSO$^W<>%QGIn17_lyB9-x|^Mplq8_ieeeV#YIapKbzdyzf+%FJ zC!^C1>zicZhrLG`wP6_xAW%Zl!&$&k45v@eqQqMgNzEAo!UrbTokzeR%6E*QAM+lJ z5_uA0*^I3) zK`TrVQ7D6_Zwz6(l!>b6y;QOYJj0#ynZ)aQgO}*~b{3T#W95V0($^~pi-uYWho4u@ zX0d8dM>4Q?3Rh^?yv_DoF&U_FPJN-R*(p(cLB#pv5_3|7k`xm}`nfNC_i*Cr z3O!%fXl2|zeVeJ~kX^cpu>I{+#umR_%Vpe_Lg42?%Juuj0~=g zbb9W(7W1$@pMlR$)LQV3%%t8xlsj^LX?MQup!35`8>rp{V(P(F#clt9lHyf4Lbg}) zb33=nMdaF#T+gku(FS$BcPqda?9Vx~toFOi>7v|;JiyXv_4tVUMp;{@LzR@}dLEpz zViif95%p{5X~WOxC|C6cQ?nM1R7H8}^N{tV{Ko2~H4f9V1`vmNMyyvM@B`<&?e?xG8@v&$mE#TK^QJ=_S0u2+qTeHfwGT^yXBY zAlA1jElIa}X5z3oRq0IB4g;j7eYJze$4Kxl?e5p$yKZFVm9njFh$Upch`{qG_;xUe zH(nbo)TYX*jH*}J^~~a?{nBtKh1BbFJ^)HnOHNie2}xS7?05L>UN~oY(XiF?^Uu1x z8`E?{Y)#sEnS4-QR42CxOT*f&?HwX1-wj*8KE2eTFx->A`SMW)#jZmmy~(E(3}_t} z(tvR%mVP~<%Wv!(-Nkv0WvRjJK07eAHLgNUFK7KYihm(b?+e#w<<&2*mv1Zu#2Z+FN$61CZp{ng2^^9*@4y8o4yM$c;6;yOy`oq*`54Sg z>#^Jii)`We{Lo;j{{oo*6$g3wiVe8oQH`0w8{xJIZ|P9VA8+6Ce!aLhlLLRqShsNp zx%G=2kt{aFA1IPqeovn zz^lQaJZ*d-Z8-Z{n;EH-(KZfyEE3fv`ko+EClvNE0X)SzS!XIp&c8y zo?#{BEvaW0lZq;wJ%(3@l<2pd-_a4Uy{+cS1hc6i7rJCQXUIJjA))t8T8Xu|6Luqn6c1r>pq`eu(WaJsmUK z8tUpH4FySf5pK&=DcxP08nn`+Ntp^$399NQQkEz1a~bYQCtAc_#A;>lhD$k)e;bR9 ziqK25|851#OiR1ovJY1c-P<(UCReZx)Vgww6?oZiqZ2|3456mkoWU|NF)?@=iO?0N zvU_dk{UK5UPUzN? zkkQ;e`c}mNHSsE+JE@};tPN_^*(4oh1pK&?|ARTw=X9L5NzO{SlJ)48N8fE4>S;6u0#1G;Ls(Kp^N!)Z z3G(4c#ni=g5AVgH+p$s{kD{5EL&hmIxXPm+n=flfUye~zv*~$$dQCR1#dP{8Z*Pzj z;q*)P>H|uBA;#4a_(`NnN8*(h)U%|Dp)wP-@oGoWrl8H_V~@70tG}QzMR_-FCCV9( zWOs3j5xKi`ECv+bHL!U^x4+bWt&S$B`p}Q-z!XcRU`(%Ou_R{5<|vnB;lseddW5m@ zL@roFJO7b1g0Y%~b7b@f_sfV$PrZ(Mqp~ft^15B2qDeL*=ETrF?H9E(93oSJ6Fm}J zJGs3|vhQyQyH3=yDdBR}#sWUpOcuV+D2orqccrWhX8Q2_3R-2tJRsqkGD9d20fUX8 zSm*NPV#9~+r;@jMdG}s#|3D0l5iF6Rt2J}cLkTMury4*a4~vV6I_%T@TV@bHbhPp` z#0Hxro9-n-%~`G4Se(?4;=$IKqt|oCVxZ)%pio(Mtz>KD$B(K%RFt0->aZvptyyBJ z@i%SM(|T=e$!ts-q(-m0U6*jJFwK|?J+=c3tOd6t5-q<5HYtb1QH_SiWE=6+%6>^8|-t=5;rGMM&2SurUQomneNxz*ueAbmPH; zq6plT3>kZQ+N5wapYQ8;e|i`VeZ@a#f?%6cr1eK$$Yoy9hnaKa!fP|Ek=!61FdTU? zW~n{z*JvKN@Ew0TRqX_7=X%mpHO+|G22!&g^NpN~r|daA+ino`6ip3Pw!IEIXqkh* z02y+1qAyqi&{Cv(Ynp^XNQ$qV`Eh@9aX5r|;mkQ1<_43YYd}fe5Vf*$Wzrg{nQCz? 
zs>?Sdg5Ju0)3O%9x(TxWCpnDEcYYg^kta$5VY{2~_j#>wF}1jra~^eNx$E;|nq_2r z^M2In&(QbR4pOeQuGe|6&emXxXp>nK-(g-Td2Y{vi62TYeQo&5P*YuWS}oO?gaO%E z$L_YuTVLM60{5IY+pSfFGN0w*U>XnAl(@3PqEwx)XIfM3B;hi;Y4>VC-7xL#t~3be z>$7@jQv-9~vG1P!kpy+~EanS}-lMMT%LuRF-r?*nRO$hKqYmd-Iez02n0H5?S=L+}cR(EXbNTKuffhHZmWUS0_|tVlPMC?UUGsK2`pj@n z=uYIMFHLErQnGhmJe8$>DD9lyvDvc%>T*rNBqZ&YBmGFZzHE~sI0>J z6Xy^ivWvs$_`#4@CNjmoaSc9HYJ}Dk+L)v91v}hXqlbo%Ycq9FClJw+jZ~@^0R=_i zmXEKR_Hl{md{}+gD1|pDW#$2svh||1`Angv(#|<_?VJ~hdF@5bmWk{Olx_r0dTj7d zi<&^m1~q%El6QYW<=Rko=efZSl13bG(k1o^^*KJ}bI zQ`UPMB(wPGe{qOVmnYhH&v3ZNv;|s#Cja(%bU|9`-ut4G!-jATWN^% zGB@&G+Se54m_NRIjGh_x)w$vE<<|AB_Vx$uu{=w9P$NXsN}uIIQTE7G(W5+9P}7dH z<|_T0OFned`B9)`?(}*mbvd>=_}-N0n=>NrQdwl>oq1N_4~;*=y=G5J=D_{7!3|Zj zKQbN$as?e5JWExX?LM-*qA&>U{vHzQ>s$LS)IU=iv(6vQEdn*I@#c$lgef`>oF6>3 z96$^_rsa-jK=qrM=qOVgHM6WO)NGeSmM!K!a67Fs$tW~5X@eXybrvQ@4?$09jS*t?VWUjS9XIJ1-L&5KIT{blR&}eUh|1%Kv*Ux`31TAo z$nAXibT^2VW`_Hqtd<>8JDah!(%?tlKBHO(xL!Lx*G}`iHH#L&+>7q!CC|6*rzhN? zorJK?J?T61+oD4E;YG_dm)q-OjB&&4GOzwiG5nxi!4(!qnUZnyYSpG zOT8Z`ODh$g3<1WC9yIiGH+5H~!A`xO`tW36aI28ZJw8eMw6Y(Z8d$klS@x1rT$E)t zjq3`_a9L>pq8i;uwb&#_tc&`{h>_I8P<5fBPkjO z;Dy+dnBLYh)R*Rldo{SnDQ88Z&q=fc_Bf@u)DU`(2fmZx-jZb7B3|W3Yosg$y<+5$ z7+tvO!pF!aLeJGZUM|o_464Gh^N5XG+BI)R9g=m%-`T^sJKVdHjb?4h8tm@Zf_EJ4 z<&+ZQRQ6>TuoDL?b5`sk@kW+n9E+_JpA;so$E@5PN}tSC7+D5rS*9y~$1!eOzBp|- z08}woTD=aL2>%{9bpb(cjCv&cbgHm`{>X)^PO!4vrIKOi=h*Sh^`bN}@+TG*$W-dy z(_E*Dx!cFYJ^oVJpOx(I8?=-PUUn}WU+VwVFc0g`g&3azqZ3q2vIBe*eF9u7Y8w<| z8W0qGDoZ>WO>_U^G>KspG{(W6ekwC0_z-Ou2Bz>?ep86R3&ayY6xaV+xZT*|CF-Z} zpJ}Zk;X}~pr3p8ltDwet=k>^lmqXIBP)D5Ov>8H24u4l}zwT3kGsYr|`yo}s3$W~m z-x*jh2c^KxfP2eU+>V3`Wh(*9cx~xaV_F*W`y*ZzD*#3?R0d6s7l=2`AC)6_C+hf* z((ar<@r7fHt&83sBGC?7v=~ETX;P0^cnJ|9yqVX zq+E;NIa?eAWY>84T~Ly_Pl=J|4jh<pyA?&^ya=bgU(|iqI(xra|D>H zqEuN35r3?BML@yX;Es`l5Jaagnvz0!GT!uJxzPn(^r)ACgX%NPoVvw_HZ8 z4G7}Tfm8;arJ%D?Pv$L>vuRhl)UX7Psp4_LQzllDfr0vk1}Z<1Zj8U-79k3z?gxz^ zTirx|R?65tO4-=(Ci>U4u1lh|lSiOt+fc9lUB21a36f`Q({9&u$5B{rRhEM=DCH=3 z9a#Q7sRt3Hm>*!82XggowxYReHtj)M&VDG6JAlkAkw)UXQl(Y#94K{%CqU`By-y1m znU3jJ{GOV&{T7R&EGx`n{idqs7HWcSHN+7EKR&*7p>@3q-E<9|rc!UNJg+MUgi6rO z_1yxckec=iu^GM~W6GF4>966sAon038AI)i6Qkt3q=SSPoC>LvkPP9lgS|MP4lWU5 zugA>Q4(AFTM6AleWRl&(K8rG3@1Bk*vc;xHzrb(OBgtoyhN*R(n8l}BrOMbh&rlJW-`-)ad#>dh1=S2eBN;$*WlaN`SFf} z$Q|ghP9CUs)U#L6hc$xsw;Zv1VETNoJdqLk^D8-voSK$<$*q3vg1uR^&kE3+k|SGs z8O?5d86zo)Lzq{BDC~t3?@tfen5euTBE1@@oI@*7T^1^JSZLLMRV)D77r{bap&cZ7 ze4(@aW3Gtu-hh=9KU{#(RmHXBDil^=d@M~B80pl;4TsaeHPZFVXDY16R-U>Hj(G`a z)K4i-DWl8VaeO^S?sa88?atb^q>#&Z{v&3mZEcXX2eno9&X2jbgyieBGY`OR84hdg zg?PjhZDqVj>AM!|F}bm_GIQj^3nE)up*!0~rS|1?>6G==Hh}`dZTiuS5ob{JikiV6 zp>cZ9`WpI`3`t7>z4obj(xkd-*7`MaIxPI+t~nB6M0|1*4H>&rUQf6WA`7PwI)fVF z6JAT4^g<^pQ70{Y-CB}5bJOpGUg|fVM_@h_*2{tzKPfLoCF)JZSK4S6nuKihKk@vs zPuscGRb^mqE&}oiM0S7hpr%E|f+a50x#WuxGf+kdt;(P<<#C#?5ireu8- zsgr{Pa05;I38mSZUdi1p)){=iM*9xqccq=_Cx$;=a*{h*;k9dD8N@sS1Fb^8UzA(> z#=2A(y~pVW3*N>b!+%TK4VwvdnVebQr|}tMHhQPIebs&$gGNE@sQKyNb$DM2T?Wmk z-KWmt_zH+CEKrz5p}ut3OKyv!QZD^u>-sQ;@#d%E{pZ+^ScZ`xgeGKHjNQr;SpzoZ zlCqH+o{*g(Qqd?sJdUlH^6*6Us7>C>Q$9}_ym~pT^%cJURXXcf5&w}}p<8SA$eqQ+ z%m-@JR*V(T$NbJ{^>>L>T!t%;A3aL`I3+Arq*JS!eya+t$j`(%B(8_9c69IhprIM4 z0j#t6>L5ELVYn0txT@uw7*if$d5hRq-WqxMe`XZLYQ?SqQh^yy>XavObW zvxOPa4@^zdrwEkUBv$+~0_PFiw7#f07t`>vibor^#1o5*;b{I@-atCfB=6JjvFgt4 zTP1SSYQBD+`-?BlyMbnj#lNI~sJxdqQGXc}zZ-+-be+BK>hW<<(50=;`WM>1XZiJkPo08)u8%-nQpU z-p01*OrPOOOEa5&6*p*|$9N+4zJ3f!{_Gr6b~ z#OuK1>;^Q0n-M2=jvTYtp{n$Ie`)Mh)?dLqXNG)|m#F8jF{!B5kut`W+P=L0;!g|>ssH$Y+3??9uW8MvOs)ZlbkD%xaJcJ6Nor- z5B$Y`cQ^(9eC-^EqW|Zn>-1E4W4D$jZ{yV$5w=iOm5VRh;)z+SriAa!wDKS*!Ny88 
z9>T5Q+y8gK^*8l=$P;4+3c}U%-q!siEDMx*Hebqgzp%*$sE#P_L3y z)U|aQIj%5`Mj~aQt?jNj@XRO{`mX1x%Ob&o&%^t-x;Hxam&-(f%6R2+NX`4dR3vBG zm#{&vHC>3$S8HT~m#j~DRk!ftyu@ob$}rZ8$FeI)5j<_BmsD zcvf@?A?V|WWNz~>*)3x;l%p5D&RHCImlXQpKpo>>k2`V*v=j0UFc&+d+m{7Xn$l{c zL9e0(BiDlRs4T^LNvU@Cds^g-@C`=`uF_kjn&z#+r09-%sjYZ4pRW8-RqYnH>_j;t zr81~_6(~E|18GAQ0_}4a4ppnH3(AIPTV1kTtJ?Fqj=I%~uIx0|VdC#JnPJgSkbl#M zwo4IZp6y-mqxUqurhjLveY9V@zm@syL4DXfX}JW6G-`^uRLDBvS@b0Y1Jy4K=gaC0 z6LY_Neqi6(6)Qn}Jj?*xbJNTu-h>BfLhfG{R#0^0FZMX?nur&sqMYjgX!H*`ZdeMe zlR9fdB*-W{-6D7Ez7mlxz&7+6b)$T+dI!ow&zcY`H%MRWgxM06Z1*v+OvCE96Nzjq#Rg2S@5_skajU_LYuw?n_^S|5T!_*A=1WtuGCDW?(%0u|EL^v12`9=x zF^jF|8jq=UX!srwG6vcH@iifU(sk2RJTiWvy${QH%)$%)gBEr>=jG_o~Ka zZG+ovkq{iE;#>4;>irDOL&3!Bqc7qeq4c?#a=W7Yc*FwqjwJ9{S8&}VZju^xpzxEh zS49qd5Gwd_?T_X41Vw@k_)tSR=s$El3(jDh2$31HzX_EC?-DF3%nkAji{dR?&$&Co0OBLVR*7>QIzWFH`UcV?1Q%ONYOpMfyKb(_flZ~Rsb1p2Sj(PhFuBRLk+A*d$G_(8?>+k5he}I&A5yNP_d$NOxNF4E+7~w;?;>Mo_O^E-erF0RT++*)R zD5sff#JAhky2p*jfGY?{Pyf0m$qq<(-#64pUrh5Gy=Rogi&neUgJxR*RNP-&ut!T& zT^&Y{URUvLM2g9yzkSss@*y}_r=1-GFgSq_hts)oIe+|@nQ1TCI2nH+_~D)3@@Q6? z!W*f$szw{Pz8))`(U*p;sIWtFmz{s6S^INcw^RGGG3(E_1!41F^= zXXmmbpiG0$AVp}FRv8`6%kn^zau0Y7*SAnE`It>K=`X~c$5?Zg-0fA0=Q!QvXqxRu zt-5b@--aT1-t6Yiv96v~b=k0nvq9TmD?M(8E|j0gCmQnfFWAb-1ZZEM5#)DZ_V4nX zZmN+am(3yPyBB$EQ+WKvo;wlZ!Z+AYmUdU#WN7yJw&$pFe*>Q#-@aO?Fsw9N;KBDA zw5W=wsr=?+2l<5iN>_+uj16DC$tmCYA)P)bRj_2QtN06z2~Tz#Zx~RoC?kVeHSDiY zCFQ%C`VKAW3rl!lSF{B<>O|1e{*>X8cLjlXkJTgpld_1QL-d^FtwZF5ne9u} zE-t&T#ZPcw|9Z`T5?T@cnY}4-K8lEJEI^~ifw6>Y2ZQ+BDY+-yw#^1NOg@nn{+yWa zlSvuWGS59fGBM&DbgpqF8EnsNwNb(zlOgYmtg~R zzC5RF^4G#sTHRNtI4S=1y2fmaw>aB=CR%_?$oR`wgut!#jZ|vE@o3uQcA94iQb!yQ zJc0kbS&(^gc&Yh@XDYI;wcuOnX5~NYnj4vA-H>?3AlD;sh=7T{0`}vZm>4m+_LT() zNy!Kkv?w|*D7-1rM_jE_Ep{bSJy|PuP~)^eSCGt4X>=fthy!?rt_+*yG;4k(t?P?v z-3z^C@477TDG>P8M@3S)EIZ5>Ti*hF{I^so2O5FDRF_oBgs+;^Sg|xRKccvMTFO@% z4~DyXxTqJd^O`cvDIlh5$I`33U$RahFxeVVm%X0(-!vjwpZMufiL3VXHH4_7!G=&)nVa(+obU9hwvgib`Lee=aIYgWd6q+uWm)xB~Cpgkw7d*zf0s& z3t8OgnA6FvSHx%_8eig1Uqm~5$-gk}rT6~)kgm$(hui%``DsG%+1z6$w~-vc+4tHn zr5`ms8XJk4)Nw0d9NUUo>~eT?!NB7*tz-kYqRuv}r;SnGrJ?zSosB-o#B3a+IUBkP z{S5e=SusA#B512eX*a2YH@TZ@yz9`#)`ex(**9JFNAY9p@>hhlprJQe#H%`;TAlw! 
zj(3G0uEbn1Jia}E4p{;<>4xL1yg4nx6JJ8JEWh+9*1He)r$?aQtQmh$$9@iea_}P{ zKw68~Nrl*XaBVaZ|ql zZ()oMq~0d`l55|JOQq<$-`?+S%Yow8;!#(Z+ofi!nQtET9{Yl|X>tmFhoy4k1zw+~zT)u^AfX+B2j3GAbH7AbRdL|xl@G|v#%sB*WOM0Tf-Q+9QY6lUr zyj~6o>yo+5WbD}sPDUQ+QblM&y101dBkZdfz^v>dNU<4|bALA~nYOcz|26*eiJ5<$ z9uAs-hKbdu+vH(z1cyl;vvnc5Uf!8(#k#3T>Xx~meneC|s^OihR4H7_xTeT%S-xlv z@;{Yx_dxcRevKso=@Oi*{^rQB%^f(IXI*O#;@iJ$8A%%k*B$(2KhPX@jMBw4hJ`wfs*mtGba91yC-x-NY^ti>IU?0%k4^(Z zzFH>!0B+R+$7&?5v03BzAln^QlFruRWpfpDVlFmKR>ZK~x`a=ZsLy(lv)B(fhf=1S zV%|jjx0q+tWX^SMVq`Gkl|$*z%fpSLVy-Rxo|5lbkjai=sd=iAOR5ABdU%JFsaX9W zg^Nn!1c|JEW zOuwojFQXLP%cGHTpkws8z~l@4sS9F16{i|ZT6N#U8OI8FyJaKLEk$%On19nVb|gK~ z$rhRZff#AA(b)W2pdj(=EpLFDJeAJs(ofL5Q87*s+if!fRDJP3AaCoe!ISIiIS0#x z&bI#h^XC}f`7}l7e8i`3pS;>I`zFX)3#f0aXt?$LKjCq}3KMjTLj_y(F{#iQ`!a3$ zZ^g&mR(sasNY0t*aPE!pc37hT{04}h_8UQvKwG#_t9|KHNvD|egIVsAmJb0rw%ZuI z0<8zmVA?M2_#0MJ==Fctd(WUKyLDYx1q37~0m-5S$sid-1OXL^N|4y(oO2Xta+WMP z=bUpA8fcQ_93^KEn(Q8YbFDqUwf8=A&-3S0ovKy(!)nT=y5Dz<@x0G--`AabR|0^% za!RKWLdO|H6Qzk;yx2}9*O4r17jU>l3CCc!Ub)kx^6w);ibfZjZz4be(Mu?{tLmSJkE76PDw^OP6R}2L}wR1f0xR_9b zjhX0;|D%&qxWQQl3xDm3fhdI?7eu6d-QVDWWRUxJ%F6iHm05O{^K<%rkqvFZ&yXgNhOJ$=qG>#XRa>`m~5u z{A(K2)doCZyWDeEmE$o?jVlXDhRk|sF3j@X3YzDQ6r9p}?aBV~8GaDHzS0$}JI>pC z1!q#MHu>FgyrO_x3uZ9wC^r+`p^5yfl(8{tN#D#L|JK6u>NouYh|#A9mS&*7)|sh# z$A3AiVCOvyY7Y*W=p7cITcu-yRxx!R>J!u$9+7X`XXCg#m7d8~<&c7P$nKkK91le^ z9$|+c#Dnr$V3BD?O3E-bG!JLm!~fhuHn*i_`1S@hPgzsdLh0t|lsjUcXU}|g_}7t( z?8g+Lu0Xy(X{)^U8>1*UEhlHeS+{dHAYU46l#$qMyUP4r_#BIjoh3*z}cTi2f=DbZi3 zdBaL^|JeeGw1jKt6^?hgy-|h!5`nHOkC`Jpc6w&>*W-G@!FwR@tCS;>Zk0122F!@% zk$$TmNwMGh$ukn5i;Gf+uy13#CM!3C<;_0ndks11tyr^ZfWa%gV{JdG$ViaHu5i9c z?Jw6PcBMv#bBapG6dELMx_`Vl1LX?cr1^DUtq?l zhsSv2HIVhjtdf{TEDQchs}baSlmArx5kB+0pq<&l;J1l}B!A44kC~DCG8TmJQ}wpWuvsO}m{1j`^iWD+R#ize=``?)kFrEY)IK;@0Mp9%mpyom=jrZ0oKb zv|a=emA*{I*wWbk{7Q(EUM7hg!77?tAd@_kBfayEMKE{YX5^x|5gw#7AC#%8T2If+ zrrE@n^?AxPUcf$M^>et4`C8xSPD+#G`Yw-)>kp?ltc#M67pvT_SL~CBV)0te-ciVS zdOkOfS;%_VIlPc&6fPal&av<4ZQ6D!_9+(Wb-lGYm_GfLR{}%iArti(lM)!fG{Ty#aAh(NBkfTgprzv8u=X+#Mrkv`bcp&ZZYt5 z1+AI=nP~zu8Z%Y)v^IpE;*=rYrIhWuP$3o*arY_z&wU?5>UEu($lhzHe$t7la5m0m zF_qgPgj{jA^Yxi9;*+AP>VhtMkI$9k!klwTWPRI-#X!aJUM-5tL^^pQZa3Kt-Bn}R zto-ArzTzAjUsZ&yr#s@z&^A}tpy~TUAq2qPT5bV zG3fzEO)e%=5qw9Z?kNs?HcyJ->`Nr{lp-Ex>VlGCfh*)bs5rph8}ltrH&zQTk|olz zFZ!JX!$2g5wh!j>aMwKP1oudkJxJhou-pEQ%&EugL~H~R{uCzVwYa+A<5q|?w_!4t z)qJJF$M9AES7#&HLxth5uBGO!RgNspWMV| z63_BJY|@34A~Wf1Pf*}MfS<=4{#p%bv}gJkPet`C(DfNjO(EzmCyh`!B3~0C6}nX+LAmcS`j2Ms6pJX&SRE@!~#*5hJyo;6jmH#+7wIh^4d+LHVxwKE9L z^UPrrNtn~(0GBGKpB95R``x`{aB9?SbyJBC<6E<}y$oh8^B2?eC58c(VbpNn4Az!E z)tM2~bn@jDLs2ubtev=-V~cc2<`J~azjd3vOVaeftW+ZW>UzXxuzAR`+2u4T)@8qG zGh}b7MdKzFf}CqQ6frl6azeb2I@!v1-E zDaW+^%Q4VKhG5c!3GVaR|BB?maVxc%dz`K5JU5F^I+%7J2fJqT^=Cb!&Mp*FM6j&3JdCWPa0csTAiE zF>_ulHuuK$Wx`@JOK>VG9hlo(4wN&Oy0v{34Vz@IX#~x{&UlzsS@k>r(%afFb_j)-jPNO;u=$|ekRfx^?xgY0@+5~gD-0AT8IfZt*Ns=m zOeE79PRkdU)1J{DxBD+;Sb=G2RU10mAL{bmM^BLiLaP68oY$mfL=@SLNvTOekS8e< z#!{2W7_65?R$I%vz0Jd!@XFmLj6>7WuDPgaWSw6w4Z4Buhe^b~+ zAO~mQrNtSEbpB|KIWuOgUM_U*N7A+|;Zf!w}v-J24)20Bnp?GJ8f*1oA2 zu$-K~$oS&e%GME^_^piGSBkvGn`hDMSFu@XVCplgH2l}PB%v#M`0LCKe{L^($-h}1 zyI}RLCH4ge^|ik)=eOdBEF*V1yx(&2dKDcf5|~K(f7W+Q2DHCdnV*q~{{4GsM~lmM zt?3r4++zzrtt^RElv4HqDyvuCY}tGxV%VqFl2jWaY?_%7+`be3>aqtj&n3oq5ivEH zCy!*0m>+MduqH!~XuJO!v=$*yeG*~nXEj8j%}{;qNXmGR_$3q}?bjl@X8UodvmiWl z+iB0mS+-9%yOycu>LUSN_|cbG&E$YA5Whn3j!dt|g?@G;>ftNR)05fQAa>>huTNo; z8W|;Q&@tdK7d&`Zy3=(tTBsb}mPeuqn>Oc@jAx{X5TQWwE$gT#j+-KW&bT91@0c*U-@Xa`z9@AQ$^OHF5qY;Kgu-eod)0x=rKT<#T);Q-)58 z2eZkR%N=iLa3N4v+pyCHd(#9d+m{`EUVdaF)JxAp-m|S;u2Y*v76jWEt!+O7T0y-N+N?WL--) 
zpzr~`zPHI9F53s9V8SWD$NvhqrXgV|c=6FT4-lf&8B@#UV%U>h2-utCgQn$VWzgZ~ z{&0E}$9bO0@}u#5Tgf4}sq-5|<9MAlK|dk)1U1SmgyicHE=j2`o|VWLq*gMF++`Fd-O;)ozm2ntOPA7yhHnkWeM}GFOEUOUzF&zX z@K&Cn|22pju$fJS7dY_7aKkV5QkzS}Q+_~BE;Ky9DbIVF5nK8++Si5cDB} z7hogDDx$%=^Ti5Hd;b@J2MQ9q?brVqJK2iiNxuf@ETr7ZN2XWhx{}#1hnERc;24Cl zwxoQraXWVxU%)0M0{Ixri($4580jXx_l#`!ZmKt}+fg<)o}fZ=K+;PLSA5V}&sv;T zPUx3l5*#tKK^{@365vSHSfjs7g2)oT*Iga|{(+9E#mDFOhrk3$=xMZe%*fm4d}zyU zAn#n;X1vHbyg;i})0?(VFyLN`tn{dTQp%S!r=t~7RmBC=bRtFE-MRV?G0dt8vcUh- zr@vN<1g0c~p-3$gG@Up)tedHK<-Yc~KXNobi;pIm$fYV*JDO1XU^oMGI_|tnlM6 zHQpd3ZnF7UqMT*YSN~JP_85oB={lDD*kUcFpk$@x!~MTPQ-oxwft6jzBfm)VK3$o1 z`%?lXIF2Y7g^V|xiammiA1h{*C8f5oOmGuDAOsOJ-MUg~+PDD?SdDD1*0 z9<#ZvmI$nEKQy##oWJ!3Hxy(^LtlsRV1>xSXjA`9U5vMhJ*K-T12Ln zrU{W>?(qGT$W#0&^6A?%#~3FM{2sLX{n>!ju(N%GSWuj;*53#6wUYNSFV>)LAkxx@ z(`G^X@guezwwCiZL@ZyTm(w}Xzw00HKCo=Lhz(;c`yff$qo5DJ)omU3p{Ki#-{Xpm z{ey?O<9*BN#FJ`(`P>t&Qg$^;HfgFVX*#w!NkFuqOa$EJo*bik@!O^--tl^4G|HCh z>ljgwVMJxI;N#u>jfj6ezRZ*qu@Gp>6jQ+N)`kH;RPn1LxA)GqX$+cVbmNTtuW0Q5 zC4uCZ@B@J4lPhhiP}lgJ@4Y=-?!Hh+Z7TjBp$d4oD$O2}b8bHagR;t!zRE?Xz%f0O z@9{YPFTm~p@~*EbsSgi3_r?I;D5Jq{t4B)KdFe`<=pRWfXKY@2!imi$!MH0Y6->^1 zL1wca7``l}OGlHj`Cd#~;Lz2)>nVj=M@6$ZTEA2wuX%v`_CiPfSMLBkKIThCUMZwbV|6VrE zSL8PYX=UV3h`9dkypZ;1uOao?9tTY(|L*5N`}noP#^_VO)c-A!<3B$iO4L2yHSde& zlY)QJegE}Urha+<5jS55wZ zdGY@CIiOEpL-+~l4E`f{?O$vO|M-!Y_Xn`QwcqSJul*mkzVWgTMwS7at$^ z-xuTma#jBOV*LMSWk@@eC;ul4;D6f!{@MtmppdF*k=E&4~PqWNzURzLfz4a%SQ z4+VDEc-)?;IQq8%y-K>n!EJkymfKP5SUW1!?k>F8x7pQG;m%CWuMvn(UkJL$>l69T zDGy$2clw2z%+~C+D7!2;|M>*MjG+g&*Hx=P2?A1ET8eLhZr=X)Pf37aXG}~jNG-I# z0SO^vfGWS^ItT`i`M_ugcNB5_FGcO&HjaP1o+YzXlMW{c>Z`E}=nGcU)K@-PGnDXM zo8)oNzAQPfMS8d+MI<9Ayk9AnP1f%sdI{{N-)hb;nc;L~aM9h_UVQ(n0_M{S4F=^`=!c$3=6l7$*4)r~nwo6DW_x=*~Bs&hV|u zPAdHw2i(>I0I}`wEpxwefNAeUhy~>p6)_v8s!vrJSKg7R`sql#lfcw-&;mX<9ic7C z2@+**b0}@+K3)5=b^Gjq#cE-F7lz9i-9P9+78YizdV>UkCA`Xmb+)~ zd=|xzhlQu+fx-8>f2ZwLg+7~;#fC&S=W}Mb4FS3%VXQN~!8fo!OOFh+>MN=YUOVzS zAAbT^^k=l!V5Il-gnrC09-uCs2-wx}D+BlK7%(uzjX8s?6rh`o63B5pNZGD{vs#B) zi`cFYc`8vj>ov9AfQX}lz8GBzYQ96zKFTlA5aicaaTxSN#?bFe%9jWU(BS=X{e*k_ z8DEE|W6rF!=gn!H3lJ5J0dPp#?ahIH!MiAlh8D!R^5%i?9}s_Y*EAXLPpA|+F)-v+ zbX-vVO!VSYkF5yr|5GWyOZ6a7C<>rKthq0Gqxcmd{6wPDH>bpeUI`r)SBbKg8jsrK zU6^@IV~dwgeaA8-9qrd*;;xjUJ1W?o><4_kmqETeeWI)Wj$KBz#m#p*w&##$zfGv+ z+bks<*H;t3HN~<$Zd|LCP4b0gjI>UdA{(q%bKJ>(j{taj8l`hq^Rz_x8sAICVijvb z%4`%>mHoMTpN94Cg{vej$9;u&X(59(-qg&UQCHbAR3mjdY@Zf+c14B#O%2^>FMD9+uv$>7wdi7{ zx8FI7_dY^X08{yKV5;5T?G!q4kg8r#y$^#cm9!rTJk6JL~BNg@#d3je`geNDF-x4W*}B8m&^JaF4KT zU+CY9nRf?*wxF<+Wp5WXq<>qjuue1OdOC|hsimlDP>vH1S-m-FF0lJZR1>q)KU z+@J0GRc04yFTPY6NtA4g{J99fGxGWJoC6iL8vj4NAwKh_061IADlR4Drc%WHX z3Sw!tv!Z1BBZo|pa>QLO)|7uQ7H)%es00#PD)ZScb$Az?+`lYF2~k~EgH~*Mf?M9O zWyj537FI&*2iE!@(u~he;A=P5p1F!zs*d%Tv}>xB>qao|P=ga-S{c*2?!Rg(W`=oj9!~Fx774>N8O{ zqG&e??^d5v94t`QG|dgh)s&fS$cMfE6nl@Fb@n2D?F?|y0oqS_KRas~;KeG)4iq87B zcY5+&l`5kYE>!k)uM(x=hA1iw3kbi(#v&5{SSSM>K(VS?laohM*>23D>8$}kS2##xGaQVOQt3{^z8 zX$}6~iW*0!Pm1_$9f;j#CkT!is~?`F*Wp6PWhs{%5~Uhmv$)oI1_S3?4kO8L==&~N zH7>n@-4%V&v<|T#P~dTM<%XcIL|UQGsKp{XiyoD1Kic$3vR6ugzP$l~&jseQlF1C= zny6_d09}uEdUY&7Ije1T{VPgr988I?^yNh*>g(M-bJes;gTbq^pMe)W)X5UTYq=l| zwBE@M;|PLNFY6N^BzHgglxLOA;d0yh=SpNzXioG&K5p5Yot0SS{Fog_k|BLHY~*$9 z+9X}&lkwc6k>uI%5p@TUX)l`3G`)oKT_A4Y#xWLi5DO5cn%I! 
z**ijlpuHCizbHM3h$*KpnlTewb&SqA9;=~_8SYG24jlO#S#E(rt!SKW#Uqz#-NPqJ-cm4f z+Yl~>P@9QjHAA5hFbj+xrnvW#2Z*L1+xPrn6pl%x(lA2{I<;`i)*G36;)Q{8kth>eU^U0*l1jOg5BrlWb;cGd5mx;uXr1v2(L zcxSEuiiaTgw*$Be{#N2jnqRGUzWORcC0c*7^FJ_B+}3}}|Hi*fS@D9=P|WZb01Ozz z4;DXfjpS~*?mW8@y#6@v0fJkVROGNpR~E}F5GWqHkOSEyjE!paAzizr2;6<**iz2= z)MA@djc!W{1l_imA&p029TmQ??cR~RN6t<;<|{3aiTR-)k|81fpW zxSkic1uFI_{1b|EO~8^@7=Qs!TpuX&GuG_?cW=|BXub)H_&M@@XAB zcm1mEycbMxGO#$qIalceI`kBSA5Ry6@CFC&v0O9LS)>uX*vz7XB>o{|dB?2c*gHU$ zf!TxA=Xd!}l@ya9rn2;++EhYL0WPbdwKjwVKr}n`qNxv95uv?$iDlfX(F*a+>5&>6R(YPb?GWJvd<44i=b&mL~wuVqOB5ZX|5i z!OagZR-TH)cq)Ip>U~ z#(zRYf07bjO#TVk+It6}iJaHfi~8hwkn&8z*n@^bzXU}Bwj*#3kTI2zav8u>IlW3# z^~oL{;>qatMdw>9neaXAkaBR~x}yO=xLH}$&MH7qTkEu61;;t7;$%tkSfXW#JX_OY zMkQ%z$L)=)>=V7B2hWY^~BrF%y8+|PdvM2I45swjgHa; zZk;T>NDy6pF@CtIp1yn@^X@EW>L=CtPvCv^*45t{t*D~Q=@Q$yO4 zOU(dN$%2mK5{*UEa)&%x%^n>*ncTO*>tV_nbHr<$El~iO%?M45Lk|JIrd2fdr7hs{ zv-Je5R&{Q-b2R6fHhrPm6nBRlL&}%G@fId_E&`DX0G+QhYy9{o+4=GJ^=^Qu3RnSg z3Y;{xbD!%#xM#mRkb}2(ze5{%WifY7e((*1es)T4XfKiiawC9gyG35TG!UL&Y&1+a zHme#MyM76)m|V+QEe~0|x4z%9-(dRtwXCb68^E|Ex}prRl2El1AYMO?<*4O+<4ff* z^=h>a1d#eYj{VsJ1R;R*I`}26)F^iT53ufL8w`c{u2Zq6rS^%}2_pFG|JHOu3H%B~ z=G7vnV-e8Np*s}H)PV=UNu83&iA7Pke+9!n{xD1kTq+tz*Jw0nfPGqfIH>&o7NIwL z3x$B4&1K?;I9AN03Yip~=zSW+jH7qC)ZxCs zGPYA*1f%fV+-djf3c3K}`U|oSlEE}lq!mmZjzRDnXa}dQOWPu;r;$Z3%3&_wJJ}y1 z!082&gLrz&DRxu240AU4@&~AXlXl%u=(~f%sR&k16-+h@n`$Bo1SWNo(Y*y>hap`z zs#W$8D^gt_fCaI|B=#0+^_jA!LO;hlBzRB5TKc6{tauEhn|@S3Yh)F{hSVAs%$04( z@@VyS*enwLQMyrTir&3Y=v!5Y*_x}Qsjlk!ADi$Kq%-+I^_%QCaI!uE5$%}mhXn)WWLd_i z0%TopGW-$=W4jt_ao?tQ7MZ($IvLub%6rXtp=M~20PhB78+9+zpnBz&cO+N4Bk$SQex6nV5+HKAj+t!q-uZ8|dC5NX zo9(r(Xyw&z$@arKaCE3O`q^ikB}gXxco^4@fx;nd@e_2-alu*HG3RW+dRrdLRx7QY z)IpP?l+(M$e5y>;5qFj>{RIQn*#)rA%>n02TD2? zm;LKZnkDxs^Ob#tKE+RnxFpg~=BAMdn$9tAl;9?ZwW*0xElb^`m4!&aR@BfW+&N45 z65*>)BHr#zY25n_<+8R>l1zHebRzMkaUNDg_xCjm&q6Ha*T)%ry=E-8*Q2nKf?%|2QZ>{P#zq5qj#k9BNN4&A^}#Eh#5E4Fir#izh(3 zRSs)-0UZhzP=%3(gi0s!JRj6vOu^7qk3V>VPSe{NlB&I1soCsjfcZdQH)R(*!1Y8& zhLw?jb~jHYAo8}Atv2BoBZrp{kXh8R!sdnpa3AKMW7#H zmUCV19wXkt**#^Zg{%8qR+xK=?|J=@FQX6c3o>-wH216Sr^3^2kZse@1Z&Kk*Bwq3 zg6}%jORX&(Jz7s0_750#m=0`1g3~Pn7_&jBh#4zQNEtIl-a{-Gbs#Z9;b*17eu$VS zCTne44VxX9+v4Z6Mncb$qK=gNuGJhzprIU8_bI3EWA!V*P*GJG)I&2XeSfe;eiCvz z6Gv4G4#`;Z!wMGV{WqV19-LOr0tm#%^W=qLLZq@H!f#wx2Dtq*tj8<#F}S7zt@?ly z%ODA6o-4CA4xlcpNfM9XWlCnF>olieIgpi;rhJsjL)G*ED(KOLfgAF2yM-o0VX^?3 z<#(*-=|J7p4`Lh)$wA-ICLp41Cdz&#(n8Id$O`}u^CENkkkALW~&wjJ~Z zC6M`5vdYrs7+N=10ioM z{yBCuJTt&)=6U(UbgWLYShceI$lD%6&H_T3eDCEk9vulJl=|DuBBnr96kVty$Wf9w z@?unlkfvwsOcZapA=rh=7EYZcsCP${J^eSZX>n1MShaKi0iTyNvVLg<{Cb~Hw+oq) zGMk}y%tI}^sjK-$-64h~qvKzUGN|<$jsCVyt;CErWrmwh*Vu=yV&0Jw7<*e-Wzesn?S?Scl60wV%eVA2e95Sl)_YwM=6#({cI(8 z5;ArE>>Cq?sqO~*)}-9+psH9wq@cH)C2Zen7_O+-^k1A1Z`LpPC60s47)CextsFa{ z*kyjxMf8A%Lm-l1uv?16Etz%@?-b%a-XVu;UPs}@uD$l1)u9^l1eEn*4?e2(XJS>D zf1i_hZKD0gwPj8PO})W**{AZ)0VUsJ-eo>l!(yg?(Vy9L?uDwr7WS^OtJZem<;jyi|9G z*ivE#pNFC2Ah=+_zCT4+Ox9C*j(IheJhSVvKS6PT&_*Q|eO2{N((ssJmS6ry7M|Ht zh-yl%!NB(VtLQbjJ`@fjFp8%^6pJ30e*8Qeyq(9Bmq|+3yrXzu`h60>+~C`bFOou zXTASB@LY40a=MPkE$7>dX#RzMvCyHWcm@2wNat?^|012wrHQ(mLx@FkbDJ&PC<`|t ze5ZcJ?P9d;BTjm%ul8~=PrSN=1+&nOdvQl_8FD`+{%Q))qfJw*b%#(a*S|bDxS-M_ z!P>&Z{4flfoN6&uB0yt@wwf$pZ{H_@M6u$+$w<09xL3YwA6mBmV~GKz<@+7Q9BH{2G`r}+<5cwD;_9^_z`mx<1RFETFZPrqVOdIeAgdV= z&O`HqJA~eh#EI>Wo_3O8C}C{bud+E^_IN0djX9;*O2~d&oc71eP8)sEJDLodpV>(jMSd()VhOckE@TcxY5KtqJ?m)J$6{y#Yv1#r z;*~t>tf5Ik^4O>CZ_w^tL9mx_NieWeij4(<=FYS_o$s2fTdM`hBq-;D zrrE$ZQN1&B%JDxT<$mzL_pjQKv&w`#u8n~3h*m}oQ#0nUv7Ho^8qy%*?nWPn&9zUd)T6IX(83$O@BtL7ZijWv(w(iBTPXuu&XUQb5BzuB@5A&Rg zO4yrggigRMYKH&e+C 
z`t5QDm_n>-->$|U4R~QK3xh}o${OwjDbs#@!G-~oZQBY*^ zZboni;%5AxAnth{JLkR(P7-R-PnvVb1yk=SuTsC2B{5pAbs=~@Gx54_k?CB$KK=r* zsu`{i7IHuUz?DH%M@7<~VZ_%~N2&WH4+x-irO#NB*ey99hDBzTVM6d9JpV?H8}}4z z{ayPttrV>UrZVVu8^>mzZ#<5L(9Gm366YGRrFTw~mPLeE(fh8Yb*ELa+YPPy_%Zg0 zg7isUAu=yq=NHN)CdX}xO@Jx~Fj;QN+S&=^|OuQ@P(f7m$t6btJVQ0fwHj8uB~ zpASgf$hw#syuifo*nXoYetd;30?Z1AVUZwLK|I-3Mm37+&tD5<3oF#wc}Y0zcLzpr zvfRq?tLS6`MrJW9zr$Ub_{7HuvJvk46ZOY+faCAfq+IM0$4@SS1@w`2+br;CN+*wg z{ad}b=cBQd3&#uxU=%fBOQ6j4)|K1r9{_F+OI(i+sNl2@lwRt3VpS^2Q?8M! zeNM{^zj_d(awpru&M5{!g^Vb+RML@A62uu;?n-Zi2+REiQa9Z)2!ek7g;sHtAw2SS z&v~r(7dMzryt4YZHUJ{5k=?iFV#UnUjO@F1*ulB5XuC0J`&8qrF*t7flOUkt=M7+F zCd0gz{O1%g57~)mWT|5YTRXMWT%A1s)AaNQflz(YZ^ReNSfSR*pcE01xR?qv z&Sh;mf6Dy*S*8dL)H;!e18;1Y@3Zfb z{>7UfU=sn|_1Tt}gDANLKsCyr`|*<&|H5h=|ouaOuMq)Vej1dX3iNr=Jn#EF}3yCWVc~j*RLeN z$gDrFPcOnDrA%h8T6f+doLX%%XHu%}Zgu^DJ}{fpK#8$D5w}0Os!Wnbho5YS6m}w0 zk#3JrH?gICB-V#y`vOZZ#{4JaI~_o{>!qpxgpJzgcRcKa(Nz1d~O+FyX4PL zxc6Hfqr!2<-QPWI}Eed~yF?~6tsbYSlBQ;RrFdnqL~ z7UB0ABi*_7EYB~(?=DMO-~@-hg7PJ5)}bXBV9wxI=i5GO`QRiQ2l^_NED64P&hPO) z@#u}>#S*i8-(Vc0RliJj>=r;deDLF%BT#`8u>tC!qNUG4(8Kw7u6d@tg_~xAl}F2t zcz(Qn&xF;UD>{d1iK79)x$1hCO#{NPprfLZ64$u(&#kNO9(JK@mV9P6-vS_0syeD} z717f&ce2w83L4;1=;8WYN@d31S&^6Psmp4OPXh9fH5})Sg;JWg*-+nreLK(dK+|4I zKk=`7mvmwPt=?c!IF@$bSi0P5*1VjNj)D1rY^pqDH@C_gMh*uYqe=8u|NGJ^hoJ4` z-2EYQbn&MW;JyA`t_Nx*DfypI{`AT9dYI2p10;1k0}{Glo%`kQu@t}ON$(qJx8GSR z{H6f;vF4YN>|>{od_#IPLARG1Z$vI{vCiAJ1C7|_Y`1vV?sFh?2u*fJmGH9RbbI~) z8k(}s1pvz`0(IIU*t+&o$COepDGlK7*SMtJ*P2%yURkFqo}J!YF5r$ru2ZDnOMpGL zJrF9ze~W}iu2Uy?F|Hie2xxXE*7D`v-G*mKER;sLy1wWhk)c>j8%R zHi&w+$Weplg>W!E6LO`KCfV5<-(4TD$4C1>{TcEp2+pUhd7_E!ssYr>N7DaslhQ-= znMhh|wwUZ^e?_)TK3pbs#(_TbT|=cb%4nYNyU=_rG>3~l%l4cWw~y0oO4AI(*@iSg>N4=hPOi6?ZZ>jGMP-xU`Hgw&5pJ9rVj zBMmhh@Dkt~d<$irLv(C7?`-ym)1guTJhFoNKgEDvMtAH(qF9=?9^-UvZ7ea< z1T!+iJ>C%e>1VVSkVL{zb4oYB#F7jpCk^IP20&=Kps4xqO%Ghr$T>wf$Sg>Q&jDe* zG|b=S03{`v?43)`zNMu;+9B@2-Kf-dD=_E&^?IV^m^*@--C5pIT;|8z7`WdKpWtvM zj9e4fv}8(#bKhtuSsedDFAxumavuueG@n`l;$kDptEb$Tl!Q8xoE;UeeQeU9b|dfT z*ZDBk%N`mgeml*mQ5=R`v;C~niLU|K22++DY@8{@{?{SLw}YXVAnk1R%aP#bRCys+ zXkwE`BWAbLel*J!P9yNK;G%ok`N}i0Z#=gK*&OvJ)2HHA2+YP=z48kHBVDy|^&IKo zgleI+$!C{ka1J`AJ%R_msePuCTu^34)`b=bFSMNBGXFDy*4e z7C5BGP%hr817DY2DTz7^CT2LhB)6%OGB|2N1W)dS9KyF>ewh0VJN0t_ukGa2J)t-5 zwEFMwV89^jj!4`pVR(uUBp~E)VB=w(m_OB5P?5Me*mQOVX4Rr4`<)neM+Y#D6>2^m zT~3$ z>IGQRi%Aw`87$&mPfW_7f{(GP+on>~{6Y5?tkB;I?A>z3(W)Fvao7 z!eYHVXsYRpPl!pGxxqa0m5QHqC$<{>?3bNiS4OoEVMiCI3Hf4OpOIqfBu|-O%J&~P z++xjtf_>%|sPfC^K<6>%7W5$rw*|i8#mIw%0n4JZHQ(mmmjPLmauKkKs-Bfz4po(z$u^kJI-D-J z9^}_9dcaijz2iO_eG<8KTI56WwgU(8TR-#I&`rG!05?&oj~*xg9daASKE(9D0B-3Tg!unlKaY!6-fU~@sFX{8Gnfv_TU+LWlRE&lC@v-V z_Tyh~s()39NM?mfy|{3ca=DQ1OlkySevDx=wIoU^HImP+Tx(%(Z;Vs(Ap%wM$!Zd} zhsUu}YeJy!(o$b>E6jEMtQdHB-+8I+wg3nwa;<2T>_V#6-Bl(z(E-ImQp5yjr++HcPuXCyqKy=SJ7dQ%5pIe%AR3(`{b~j-I}4*7t03J~4Q-JnS<9EHYWS#%^-$@Hskh zIw6o*HNHNe_S^B)Yu@ZK52VVYH~mTKSEu%hK+eFG&{C@v0}oU6{JNTWDxI3jQ*Iu7 zcM&FYc{TJb%*1=Ac?b2ya^x>%IkEj-&!vxGt#iqrn<9G8?Dh{9qS+?(0dI7zoEarN z&5DWB?OmteRg+;>d_}EBQ5!=3f3>?B|PUrLD3AnM#&UcUk@Y;N1 z0kHDh$>5mw)tKwHmZx3FnEQ7ZdOa5ek{>hA0-m zX{C^?kl@y#E{}pJ1F(BC(VL-d-))=sK29invXONpMbuV5>2504gxvm5nk!n2!%lH| z#L(vJP|%6DrKG(`8VK$h4~a!u<4m~O#(e`D0yoQJgCm{c@US7kaW^XTW-2`v34D4Y zw%YbHQ}&Vjd_8 zV#%-d)fP+?>n{IX<;*Vo24V^nUswG&4?AshOntn(0FueGo^YEOIMl;-<}T)U_GTRw zfMJe1k-%LNG?-&~=0bf@aDp=EhVV{rlD951_HESJK-aqzPlha9c!I^^MLQ5|e3UHQ z%~dLPnNMq1`SqvLkYrt z;GGr_gyMRDf=9-HHX=B(6Gv~lUSV6=2>v*_*2;Q_#3x99>%Hazd00^hEIJrrm~E&< zA`{|W8(l;ZyGc=uyn%<6uX*KBWh%3X$fOkA$FzNTT3m|m<(0H2Gcg~;Qt 
z)3$+AD-Ivl!pb!oc{F?NB-_zMYrdK}2JAUCa+wXa{czz>;eB}DG*P=-S1uh)Cuf3) z)|$^(Fy0px+7Wu*3lO0%JW&|i4>bv7v+?d%>tACXt-mR|k9Ht>B-z;@k^5%3LbcoKNoOb4F! z3s8!@s^THRpNV>UXx9F%x(?P*U-MPOY)ZS`KZoKDVy=%;ZwY<%2tF=UuAp*8&pb=O zp3%H|BzV44`u6*2mGRJrwZ2G}$3kS#$iFwc_*uD zgS>KvH(wcJoZx>LUHd`4Fz*`7JI@Z#klpbpS+!$}wQ;TpTFx;X{O971%Z%{*!c zvniQ&;z74QDrguc$RYDJGZ;`nX`a=+nnS4iFTjC)eXm-0!~y@e~)g?^RQj=fzPvRi!!%6eMO#nHC65` z`p{1eNBfCh$c<+QIxMk>qxQ^cZg;v;2u31JW7*6tDG}l%MRiGv_}F!l+;{o?s%nv{ z9IEg1)pVNk<->q-km>IEvcEHvkzjR<9T39N!i9Mh!uWGa_y<_fpS`7)hY%|DbuL`L zeI>63wZo5*r)ov4$ry&oOH^qySAKM{C1;VfND5X{39Ig|U?Fep(!WxFno*G~!mEwq z{84RefF*bhpgwx4Cl{9HVqFLDlF9`6EDD9L)Egeb>-^Kj{CBR}UWwx%1o#8O3C-{@ zcWSlIl(Muj;8I*VKiJ391!$NrY==Jn^nUfUSzx24F$JJO^QQIOJC7~r8$LI3a#xn1LoNSJI}k^1WQwFOt#Je+!4z+BY8 zgfwrPjyH^hJ9~7WtFv)t;#5pOObT)2tPLJ zra5!&6Y_~nc9}F}`o;~iS9Txi8XX=_XbWel70f7~L>9cVEEJC4Y+()myIBdJUOV`X zw)q~w942EB@$#2X&#~K_rcsF1Gs5RPDp;-o&P0X(SA>tfKxV_7?e)u${IXcEFUPp9s1o3FQMbrzh3~m{KW9NMeAV)M#sn-_8U<5jExv# ztvrFh`=o;1SO>dLC%%O6S~{H#P|}*cM;#mNe8szIc>MN7%E70J$+S>OmG0w+o2$jr z${N#2rNI8l?>ZIcdmJ1^K3Hu2n9p#Ly~LqEKSa-((lY33tfdf7#Q2#c%CsN2*N-Nw zsvvZrdQl7Hu)es_4J6`NYowZcWIi@&tIKDp{PN^sY0E= z*gTcOAZ9J9tn<|u?|5-v9xzEQZPBXpJ&uhEz|FL%3v=Cptey zzgCnR|6}Qe!edZA(=OaN5ULlpwK=F3xDi(t~2tW$jWfQWx%<6rW$VJPSuCk{|AUz z2(N2!58X6e_qbimbyRX&0e~Q^kv0i|9}8~&IA+zS=CcbS@U5>hc0LacO%Ni5O2rBX zz9QqO>qHPL1KhAg-qr~S3e!SisYOY#JGkjHCw{ne;zdZ;?{AF$psxfpj>i6^i}xyc z;3y!d8-4S26ALlskf&B#@?BJYG0@xh9E%qa$wUKw5^LC{YtB;aV1G;&VA;Ld;#Qka zISTeZ+V8N{1R~*6$W`IvsR6FA6U`2ridM(y#31d3{Cc2UM$v3`DL2Ffc8Rxxi`)V6 zhf`&ii<+qDGdl1-8GzTHz7dB3~Wt~#|r!EsC9M5YpgsOzx8J| z+?Jx0mG6K%AL9t~@8?l3-XSgXsw+sexZ+cCo78X&-G{C8fej@p5Eh!4#)PtZFTf?z zTey3P`{yCBUnmnap>FP~bW&fL{2VmKiE^0 z-R@90&aJueYS%wUI2J2Y4Cl6Nr0i?v>U1aDjRbPKoe#%vHQ_(|q7|(#iwPqc_|-e) z_i?1A6xDIa>MCT32fLy3Eje^|4@l=Qz!2}|oO{nb z=iJ}D@BQoj>s`y`V$ICn`+4^B?f884>+XB7pam-sGlb*S6@d^HzBofr?4PI3W#+8M zWdW!T)0l74VIHsLJF_;=R9<`UkcY~GD)sYcI^MXrn=csH3qH1vO#>6{E7DLIjkW42?zp^3S#-}b{bNjm#NJiS0mTZ}GTvJ9)Q&VO>%_4Ox?)^$V#jrG} z;|+tk;!E^U(t2|iI0=C2S79>+fp@ICeAn27*N#(3MRl}_hLB~N^~8;-4DvLc^NE>F*MwLdjzSv}I=vwwTmKQ3mQpQ}%Uy zA2l&tg&E#Dy|U}t@#EP*A;=#gYML=juCKf^b$L7kl(n9-HwkOVMEP(sKj}#W%2!vr zujVuKIF~fmQj@@~r3)Vc>1vt_BsoLR#zS{MMazAE%BDU?hgj*_T`?Vf01Z37WxGyY|(E|tNJJJ3RQ3f7*rX=$y>Pw zw0_cn6deqS5uPm2t5IMt2$3=%wqaEIoGG=jt{gM{<=#L>Izd#ijVIlEg8MDi@0Oxl zBD`6{MS!Yq4O!=nVKwEC)svzHhtfst(6{f6&^zc~6aw>`9Q#l6^rLNUaqxrpO&_sl z{!X8sH}u)LZBo#81%xKCI>ZR=YI7qz`U}h0VctY?pZ%zfMt4pG^$X%`F@1J%HZHTv zV3o+&KKq3DkC)X!aJN7su$~NjrF+Y{>`WdYbyHkQK{*u>o>@rO))ielkA`*eXR&fn z6Hm-7#S+VrYztzKL8D;lhyc?gx1{ePU z1}T6*lI>nd8K&d?OZ@X-hAbca1fl1cGe^-frzX5qF=77WzKVRR+0}(%rb%aX^+!Yu zpnx(36=LsF<1>62xoa(l35&UXFHP)XpU$k{IJ}IsOnxS_`#R3d%$mpK__+lhbrrmf zHB$zhPM^eVqlZ6U@Yz|&qF-q3&?C$fs9&^3JeCBBk)oC115+iwBM+zpgU^n9og*)| zM{^v$PjJEzAhpLQm37Eeq0kxa*`ipK63aO9Ddy*pU?MY0&ooVY&1MqC&H6k!DDVyqLd zoAjl0+EWeu0(?!QZz-${Z&~!uqa3#_)`4Q6PY`&InIg6#_(L?c_i@aTM2J);6ookfL^w>8!eSuol$nN9LrBx1e0)hGTl{&nwSWDVeE*$212D zc7bNc$}+iTOc3VDvSItWg7+S-!Tdje;Gfe0kr;ebDh>WF^|#UuzILXN^*@_|%Z!WK zAerv{$>3v`cQ~*|j=u(%c)#1P)Azoi)L^+ZX(Z3U5&=eY)3P^Fh*n~-%Xp*A5oJ5) zZ>TR}KxLq8kx`gDfPYsh6gBap{_~u>1*121fOY+N&na*7Q|&-(lAaKqoXxIB*S7QZ zI^JBaF}_z=5khXtA0yQQi$+ek-bBrv_Sn#4|7dh$6LhWxCjir~7=bDpkq?QAHAO2t zmUAw5?q>u6RaU>ApC)nc*a8~oaB3ygAlCTB)Hmyi-F{G%h>KO>*7rsr3+-GlxI~Uc z3I|p8p?j)hECYkl0^VFaQI}gg>)<=tp0a-yrFS&?boc0P+Wai-uk%`1lq|>qKyZFs zXS?&@;8zB|rM-^IPhX+^#gGzxuAu;HmO88hezMyofofIphue`B-&1F-i%?O{h73_c zsRF#~bhZ&&)`00KtB?*p0A@htyseQC@tTT2 zY!Csc(k3w{mD`ck)BIa#SOtQsjDXq%NaaEMlCR^3vlOlQnuhddtBVrB!oZ|Kpd@N& z>+y=oR=O_~er}IP{jAud9aqE4%g;Jt$Tg))uq@GKPmgN7AZQo&fYO 
zh7_Im2jsVA>ZYxN)ZJL7wAN}NYQd#n)hlEO6j=4Hs%rz0Nbnmx+G^d3J48t&O=d9C zKEn4{z&zUVvEm6Fpg6Psik!H>=SKC!Qx4S35x*Hc+i~KvKu)Ad$Rn6;QR9d-Zc&f^BWi(o~bJj$7k>F;SDMh3lKVNPs)%F6_Vx3 z3Hg?H=Ng<1@$F7mgm68lzRe8nV|vxh?EI`yDin|s@d>gj1WVzUxKEi_F!T*)>t1C0 zNu%yQk?LIT}Ec&e=h zJxnXY%~pA>m~T6DPqECrVNDM^c8iW|;^&W|889V-=Zb3(eF{xS+weH%H;_8k?n=g8 z)LA(4%te(m@r8+1<%{_*TU(6@Zh>a2K}*fY8|(t@&7SMsjOm%Ux=h$msWzc*J7%Pm z10$WQ9F4^7m_RjlX81&WPKx%+OcZ2dA+?{PZloqEBHCs+*~GPWR#Zg}bpPekNMy%R zYz3qq*?;^rdIIwcSGbCUFq$=Ki=e33J)lU5eg6B7WsCi#2^P!;)*ZERNUmC~+ls+5 zj^>Dg6zs3eY7(-orPKhaRFN8a|GAh}j>h43WzH{IP%sIbo-z5*q#{Vh)&{3nuHrq( zepd|sX7^gmFioTLmP_Oq8~S)6mxWYH;9GFr6Z=fF)^}{W#SQx{vHib>qS|d8Y$k-} z?L|FPC*C5Q(iNp1Ri^t(9cJ8@n0WXS(d;i-{*VJHl4K$*))tJXX7ISoGlCK4mJ%-(~q1Gt-n)lzrdU9=~Gl5PKVY6bX2I#CNAA#Q z^a01!aGk5+Wy9;THQK{L$u<#2luVEPvMpPv5!0l~6JI4J8whQ&gu!F*8q_p%YMd^n`lWO;WGZ z`?H_x(2Gurf`aBHyL5l_0ERb#YSq;;k51Ou54!_+Jet;}owFu_oDn_-sDW8+7DeNx zjVE)yrDt^UJm))8HSoHcU+z=o}npaIUNrX_NsmmD3a-YlGTh;U}|+%7?& z+x=KnCx6e;Awpt4qndPV^YRX*%921mm;2$-?JBrHO$X%u!V~sCl?~mP!~`^GN`?9r zR`CW*xl*A-R`MlQ0rC^MxW)Nw`n8%${;aQ^{e-6&SWm077BL z%zcNvWjd3u)4 zT+i^Euor(!$gvFZA&A4%Y6>X2(VTFp>BqEFz;a@S*R{OVTRE9htsD~BiXFFx7M+26 z2RylDgl!j&P0I3RJh(>oe-2X1&Pf6w9d!uim8+bVc3nNCkfc&d{BLiqR z-2WdxIwbcGqejVzUJhhcrF$xBKI%udaN5diKW1o2bfu7>OiYcxcOkZi}kk|^rotMyM8@Y7GofKC7S3%mp zoUMOirC5r8(fD2mUr%3G1YEw2jpumN?_d88($uAY`#_i1bF9*6hl#UpWy;_?M1;*! zd-8-5_wtmI?Bt-%MROJNMrV5fxD01Phyu9;NV|9T96-d7{QC?^pgvHr#h z|MB(GZ~+(e_qwd>|2a?muYLX-Y(})7J%AqMonid)ztY>EGJah*5MEqmbM6y=Ug&@O zdolS|kl8}c=jeZ9k^ev$oHsyZy*9uc+dr9c|GxA;ee(Yw#s9`B|DPNM0TC1@=NRR^ zZuCDB6^%dc6Y(%Ou9eL^)Bk$rPhWm%t`~!jXbBKPdL{q2{fplwdkLUM1#@%T$NsO> zNJI~Cts12FZ(8}EQuE&_pA1;o38m*RY$3_V$yM_ITv>D(Iw_k@0XwM31(DLgQ6%%v zFaEDPq21sEa=aq_(?f#+E|LrY|7Sk+uU7%~W?(Ndp8n6h(b(Q-+L>&l_Wvj{{Lq(g z#FEMzr9U-h`rODJ#g7~R<*MOSfJy&iN74-3;3r!oN0Qv&fc}Sn?+CvXZ{Kc}{MQdK zZ&10u$dyw3>!ZJ4>^}>LT7fFC-^3qx?y9DE#3h>~g(XP8|ar?~mnV;s94|3=`#J z{AY?5F9j}RRE$3Rmw>0a4O~Xp%<*5KBxwe2HnyG=PV%pfG2c8t9%?M|uRHylbTtMp zqX;7y_+w+y;sH0$$Bdl%ZwDO-oKLwmk=Abjs5d{PX(9lgZ>akTDe+%u*eG$}PZB!w z18|oI{$?fr&M$v_B5*^$k#y^K|JVa(5O9@}rRDr@2)+NO9$WRj?T-xuphx0aXI#5Z zDC^87c121p_8Nudi{E_>P%oD=dahRgA&yN=53um!OpqlO15{Hbjv?Fg4d9dAxnz?G zc+fw#PoseaOox-ZEMR}q9!8ST8ia%Kk(MzKo?3U(wZ9IS!(7rWU%*W4tBTMyS* zXUq`izNuT^I0=l6cEHu|Xw`_zjx_!JdjMFiiZPzRL%bf3_P`$`uxB_)L~9&_JHVi( z_;@6nzB!>;fkw8|Msq>-?E(&*-^-3ZD7;xQ$9}DbmNg{bJIzdkGDB4s)M`nGJqw#E zHM^5*^dk_Cr+}%Sjjl#~=y;tMOk}|H42^-r=%5r)YBr;y?*zxr%1j|D3u9K?xXr<8 zdLwIq5uV%Hg#<1+SQ5#jre7+^U;5fBnV*xB?cTg`O>FsZbuvad%(#`quApCrC}RdINGCDQGsdh6jmv#feMlV2H+?j*ft zj*oewpwzEwR7I2sxcw7^pPA#?=}F=ckm#05&B9kD0mhH6as6qw+Dd;X%TJ7e@N*9q z(7#>&Q(vg^IbRow{uvCe3Hbl&23h9}n2E#Jg5ro5Mhz6E9MB%=u}_Q`+C?<&j=TK{VsY_ zkiTg*_e6ko>!&=54dzR1!wS95AaJ1ll?W*29H8hcztWC|!ay2lMOj-;LexHTW*>Xp zkJfv1W5v@LZaJI{&|4!!C|usr7uKYF2gk<$A@+Yet$Jmek&fiqf_O{7=D+44k2~m@ zjy_NjhQhG0Zf?3q^0lQ}>Cq^6w5>ypIYPldzs4p%UNN4DK1(61qc0;@tm+?omIVY( zf64uG1;81OpA&LLH?E)nT1a^8NG}>>7VNT5nye=BiMo_uH=<`VO41r`D$gkf zxZ9lQ;bWYlvj=Dc|7L0kDAUAtCgW}?kwRPEO#M)vX02R!(FxQJ%;=uwNan58@x=GN ze_$ELli+JuuA6?p)yjRRHX2p-cl_rw3aalx@4_#;1`#L^MDzNs!O8r)tHP?+O7iS}wEHaQfdRVBrM_V8DWBnzb zoBUrjtiKXN^u~&E_70ams3QsH`-~13U2Me;h=P;z+LKfVp5*Bz2Y0K2O)A2jvzX(Q z?QAWvVP_lNiw(a$fD}-rIxL6F2DMl)>+w7u)$vQ%)x_FPkBJglu6l`+derM|Uyi4D zqY{CA+@^0vs(sXKmQ=0eUCH7H?C?e!fF;zvBPMi`Yo-Uk^9+ZM5%2j zn?R<=-RIz1xAM}Xoti$dOV=k7@=Vi%Z;&@*tFtd&g4jUCUo;E#o-Z6O|7Na;Gu*tf zDzM00AiNzm4ujeXO89>{KDwp-R#uysn)U7!p;fb+SFjnD{Gr=;BF)&~|GO_7DHQRXx*T zDaA1kJ zlRksbSY*8$-9FuHnG<>=lID&#m{`iLrHusauS-Fmz$5t3?!T!H-T(YoF;DXybKmo+ zR1(GV3cz_ShB@xQy%;g0f3uZn(g6cKrHcG>E5xHReZ!ZYxftWMiif&!!Og2B0+xKJ 
zZe3FNmqUwgy1C(K+A0Ax1~v?<5u^9LjFEtZU+-`7@+{@I#Wg>Gj+9+0U5DM=7~dL@ z-UMR)U*@e33=;)@R|kLm&zX9wN%p1cErFk%{;iMz`};><)Icf(G-Wp?sJQ5j*0AT} z2!bEmh>ctWxc+H7ZMwi$KUHaRkh&Mz=J)@0o=S|(dohE=;`a|6x?UTd)$RGlS#+on z*=Ug2*)8Y_=J-n7yLrO>PfV~I1Hk`65|Ey5;N6)jL%@z=^c-`!zPemuBo=0R^~LW2 zQ_d9$)3du{yj{`hLGX(~Uj;Tth{SQP$U1HPRiVB6x5rUT8_hjScf(G^M?Hwb6S3E^)urFfgd4hl-x*JLdzf zWk}4Epxbp)KBUki$!lgEw2tGIPppi^7?F2@y#tnfV-#oQhWpB2Pi71KRD{UtHsnK; zWFZzzH7YeHKc3O*D{}3mfT9ICKC?GY+DqU^i=NyvVBhE^_LVx(Hr}L1DThXz;pUaN z_%G4^CN=zS{FGyw?db|x@bmSbCTLsmZ_+pzklYI(tabIUzQ*hNa<*7B0d0LZjGicc z^Gdrkpm?K`9vo@Z#(HyyrwRbCNp-)V{+kdgMIT0Z{tRbcx>6A?@mZt_otq6J0I(2QuLLeOKl65x<1jaEg*iA4mN*395iuc)dD!t+#uxn{X z7HWW11O!%@092rYL*yqj_4S;#vFWUq8ff36>UuSzSLmEKnxRK1z5-$wXxw8ZFvXL= z1x%7aNo~HrOPK!Sk2kDhC%kYIDi*(b0?7F*E7f_tzr5pB`r8{PBF14o1g{7e&mPfI zqQBNBq`q!{j34Gauh*)wZa%UkCdyz-0B#zP)Ww9A++#xvY=vAzwO1d+n%19Nv?^a? zbrU2I_L3gRnDSa`lw2MawI6QTbJ(cxD$Rz^z)q>x znF85Gvx#7>anK=Wv9D|F(~X_0OO)AhEV0wfgiv;5il<6OV2{-0M8N_LkT{JaFSLff zBI}j9oeQW_VDbuxIhhx))k>fV2eu2BFh;Z!DcigHDWjg2A!b@4oFl&UggR3?`s@#e zpvP7cu+{DhTaNDIIx;Pv%ppZTA?L5<(#7_htsoPWluxpi+e0l!EPA5E`bVVGU8dzB z?Pd?ZFLPRItmO=;Zix~wIVp!R!%xpU?-}rwGftB%O?o(I!JYf=G~L7L^+bmN)8>(w zB^`fZNb%dbPoh;8=wbp#&;y^O8QHQZLu)4m-e86D;;a7n}lhLmP)DLexEZMFS3nB41RsSr-Kf z<+rzIs#JG3hrtQ&l;-H*C*bL)nz{q?lB)~80FIdVz8@7SBW^ zqbT|0(>%-rBJ;d|8gasFN{rSuya%hUU%4L@xE`A&89FB(|C$#VOXwXt9BA5S(rGfv zC~W(J@`Rl!4`9`c;)`5VD|sK7>F?GkKZXsqc>lki5ZeM>3AzSdaeyQfOTv3V9+6uL zvy8is%o}VNmvFihPo2Y~w^}*v*rsopzq<3u052L76x(@Ag-?63_+{f!p{Em%NodcS zK{Uo~uF9w=k7utO+}Hc{4<~;)l%DQotwGeYAoQ$Bq_3W|^t{e98~ef2U&Cpppopxq zPxwdz6LLr_jn!!7%#SgO4?{KC zhB(g+h8Rwyw~|E!){HPrSQIT{In-sRQ;>B$mqn)fAdJ_M3m!8-%}9#F`SZiBwzhAz zHs6c46YMxPp0QxG#bf~r_xpnE!9?`m75E{uztbsID1F?tiK+}cuCCtwBuN$#&T;pn zdTE7+VTs+;d*kdOdJmx{czQCoAF@Y(>3K21;|Ex5yx%y{m)-6hzSO(g9&ugid;Fro zmJ<9!Q@-Z4&Qod^P+##efq<+Sj~XwN1Tig{;&^eu(WHb57R=SDRy+22_V{ZbH#c8Z z1p+@KLcKyg9iR|hMjfOQM{xv~28^;x02xtJv`PMs&LPrxt~W~|0b167m2Lx6#fx5X z!0K8Bl#_Z+rAMkKoog<=7lyroiBpmEMaDC-#sTHA-D>`o%v_F(mv?%M^aHs-a%MQ7V*i*lUcq#r{7UY@&Yr1O^ii}26)fK?hT)V-2 zy#wnF8<5n@{ES78&Gbc)$}n=C4bqJES~%8ZuEl_fHBdHCjSiz7ell7%C`_6-(h3$uUyzhOV z(IkNA@hCEm8VJw|EH|_Hk!>z`i)jm%+f#xPpFMjG@AzS=(iCMp_Sb4EK=*Qnh}-!@ zO}6GF+=XP8JdYjt&?ix@HArC`5dyrR-RD}hpmk*dpDG`L%!p!&4cgGV_wWf{=KBYM z;HP)3hsl~Zp0JQVob3MCDwA@*m7aHeyuR$n`?1d1A^hpMZgo-pIrK&kr(Q+QOe^Gm zqQ|wfz%e?kqh9M)J@SbGIxaR&A7f~ZXboDVxTq_=czo7$bs~EPV78l$ZB~>j>-5BH zYt<0wkf*r+n0IA6S-8JzGJULV*4dv5aY~(r^vEON>t{{fv*|uV2v+s-58~XeA!66M zEQVfZsSsp?%#EHL%cG_Vq1M&d{816atkt!40o=Ya((XxRS`LJyTn~`KUWrseLG>YH5RiniZEJ= zDt4Z6IeR^ZKhR?MgOXYM2e2nC#iWa(uMgNLyzz9}4hEX8pw1uZ#@WSI7w^Pt`w32S ziJ#PAN44-gys81!m3G#~y4hKe<}j>t+dYH#K(SPNGi=b+=~<#aAJqx-uSeNTC_epW zJwlfS9?2l;e#{j`tvz{4$s>T`);XR?udSb0I2Yff1dHWwZ;YfR*tsrbt*|v*Fp=?e zEC`l8fVOc6Vq}Z0O05n?io|dvDkfcN`w&96#zho5L_~Uyys4s!F938OnCG4^3k=Gx zy@x_rU9K_d7K>ijJ3hgl0Qv>2Bj5`@hBQENdqwXxoNubaI^jI|%Q?LTPxOe2hERT|A!eB(n=M3(8b`e&z3M*c)vj=~VF9yuthHlkTxNg6IXl2~{aqSv+ zs&C@HH{bAaXkt@zROH!!=*;VDbcE<;R~tOwXn^E6Z^!RgRan5lWe-dFSIrkoVuABt z+j}RSvSKTlfc+*Su4gx_II(!IB>5bR2Y5=Y_*+2X+T>t-2N80%>Wj_PlZ%Ul_*+%xzt zyfW{zF3D3Y)OnN=O2PdlMDrEy4k}_5Dm+xVufaRLky{d*y52F04=%3LP!@D^8Rnjg z^kHC`JlomIU05GuuEOLoE`y~-Nrqu%Q#w6vmj$SJQ~vKl};*ROMd1$MLl=q5f= zc9^Q9+P`Dx!oH=Fb+xY;r3vP(`D8u@!-> z_P%Trw`D2=3o)@_Y&6%DNmtRxsnK>1#0pVO(!zba{8GSr8+Yz?pE0iKSF0E^{hBY@ z%F0(2bF!nGnAtxLbkY!30PF>qq&Jnpp3t9B+THb%%^V?InQJ|ak)@)Q3^pq$vb6}5 zJ6qiMpSogb5Jj<)alETm@n}8hw3=8zo>0BDh)1sVek#3I);e?)TO6zGL#XGx`dSQm zx4$y$KAd6E^x={*v*xQA&c^70)^>2vTOgCLf*~XPguUS+DHg-I`f}W*{{Sdi$^u7; z9=1gtrSN-x4>|1JM=mIuZHBRL{c=p^hJJmInwCGlN~+k3>-1wQl~QI3e{c_fn^k!% 
z6HQ2OXxoF{JOq6SI>D2`mTFg}x&>5@GQe6NzC`{!Pz}tQs_?Ahe(TgoJb_57TTRL< z8tvpw|DNt9WnO>rqt|`io3WsQ7}i&p$~*53B3U6HL@z?abnM?7r-~C#qv<*Jw9SNV zAk~DOO?3YjzqnAQ;cJxLw&y6?D|%aNm|q`S324P$fH@90kX5#s9+If~8t=RhY)vNm zjOsNp%a@QYXCtyVw}x2nCx>*-eyKz2b$1m70C<&27M%#8u{z+XK5VnpcQ3syT{TW5 zR9vS)oozD0g{&`Q?!VlOU`l>9r}F3^Uf&cX+csHX5}L+<^>>x@yqQ)N-2KE=FurS9;_ko@uk$7XL#vesM8IrK+}3_1p&AD)OXu|Q zL8AGel=wk9-|aQuj}jaVX%&Asm&JRu(x{x{c^m`jEU@kUS{B4}_2p|+U{(t4Qcv-95G4A0Es#U1=eD7|7rqG^(&G6Jkqm?jZReQ^)5_jPOM>8i>je@!fy(L?!_ zJ7T0T$N3~y%IUeBy>E&WX~VgSt^-0=3)xxqsDAMItZcgP_xz=LMuMrt=PWbar6=!q z@y+758y+4bE?nL@Akl_C9-*1rBMGS;&7^u=^?An{ihV=Nef z&>l7SOPcu(>?)_X%sckjm*d7+JnObbHNl5#MpBB7^9>?Ed*bSu{J`72M{6mpEXn-H zaZFe>HReQ7Z_SdjPr=~!Yi|3-^18Fa;%YgJsyUC?+0nJ&;*rXCg1@fj zAwDB4OQg_7V#cP$%uYldlPdPFblPSQRPcCJ*EeZjALsd-KA#~3*TH-x63Mh1o(B}uhs zUf1x*&K^ zz-(Hn^W&ocy#YH{L1{YQ1vEN}Es|>2Mm2cVgl8qIhbn(#TDkqqaJPNf*AIbR6X(IC z0ZzLd>vK`2YlEy5fPH}+I^b8 zq#9gbOsYH5-)UnRoKHIPujdtd8ayht`-mRfplIZ|V}c<0=b372_fR!_ys-A}mC^wI`7gY|$nCkT*VKbeK@=c@%M( zK`N!W$P8+}5OQYr(5y%=@DUiqE59gsmPD$p0CZ=q&g}Wb=_sIA78KDMNdDvnKwg8} zd3*e%pIX_gb^PZ(s__(p^WJ3%5-ktYViGBVCABt&P1zh~G z&Ubgc=kRvg0_TMGs&!Pvc1#g`m9<_7ge&LORCnYXaH*;M+2Y)UCN;}b+zbLRe?^cP z$31Iewwlx18;*um#q};=T1vf_hVJe3ki%}qu6uBA^Yd`+RUo{@DgCPe3<~C-D{)j6 zSsH&DSg{W@+l&%F5YyOgv#950R$BD68`k)NuK{Ls9dJpz+~gh$`CiSbfz&`Tq!+~ED=H9e*(flE;Zctf$x-;`XvG!0{u?Digi(H{Fz#2Ui;A+vqy z==(K-V>eJH3g=wVM_{$Kc-u#A7C(wST!0!YlhR%+Wf!vT6uM1*Up-a5~g$TBOgt&C2)W7Mb$b%K0J1DIYGBw@hjQUy)RihtH%$cjD(DL!g( z8Nqo$utn=zI6gg7>`>;kf-W9j4C-Qp=wG)*HAQc*2kF^xD~yw1_FIYk`-%k56RK^@$tY3b!J1%iZ}y4(XLsyM6`{|*HW0k=hTOZUl{CSD z;p39gsD}G!-k}gO`k3N5e>aVTno>YdMwDMEz3nj!WG$ct6geqB8@eJmH)Ko$848vU zK(e;HgZ3w-iXIv$jK^&F!r?qnqmu9nciK;YDJ;_8Cm|KMWkhTfgMS(Md?147dmKg> z9AZ{Rp0c75=XZ8$f*%|Tpb_oHLHXt>*3{2d@aNw#XhBbL%VjBIMJuNwn!S$GTAezL>v(0yuW8BEWqa)d zj3ATR=UuVNZGVr}%;yT*R}$dildTbbnp-nVE2^Rip0#F(WE*R}%(5U2Iu=O+szQc_ z&$o=cyS|7TAwp2Q49Z2(whDUEIiRogO5R5Gx(1o`-PsJmI|=^p+X`R_4T&e_xLVm(w z=U4qMNjdQLv-57#_HyM#a0O>Rl(4XA*J@*SgEy8t9RJDO1;IqFqG|&#_sIc^*YVZX zq_R#i2I)IOq^nsuPF>UUoxP5?XX*}YK;nyOvxD=G`l8=68HarSDd$urO$+snO}%_c z6R!IC^%CW;1}Orl@WY&Cl47-k?$9@P@xq*4i_=Z2qwFWv1YL=E z!}dZc(tR=2Nt4!msA+%5>+-~75fpM6h9 zov_9VRKpG6GJR?-0XMi-kg(m{B2?D4?Ivq77orb~ZrP;$A z*$``Hu#^-rlu!VVy6-^+=>z49Uutx#qZ%s1A$CAZn~p|&=jR^`xnHYMKCua4|cf9wm7852>TciR68{Ar#Ppr)n&sS#8dNz%7LtQH04d7avfLR(;Yv6M^ zvqw*wJ-F2;0S7SLYunQMJbXlDc(;@;E2#McPZWU6P?T>tVcp_c2qCv@mBc@G9LFqg zm#bcCJgQ`Q7{wq`c{1Ho8K0NRFx|+?;;B=bRIeK4X`mAuxOu)<+}Ow(rF3N)2sfQ+ zHQykGMzr&9T7n0YJd#=Og$udvb&?As!&2=*i-)S+GSXH;qG{W<41q0W6-`fIz1nTe&Nw%hs*5=W zdL0-J%Q_BYO{g@+!!Dh>>8L%`b>39&*Y|uuYcSg$F*&YTAN?5;9AS)O@&GEL)-gxT z>CINhT@=~qJwFf3#=7bL+NW{w&SS|l2Gyyg^Q!gyaJAnj@UtSqMJ3qUlAzBdJ6J-39b z)4}!!yVgcJ;dOQM>v~&ovRL@ydn^9Vlly_IS&9i8JP{>9?WH9h(t4?K4D~OKs*>M?r7EA;wm~g(m zQWdxUc~G6^L;0E0hErX6ePK=P`1VD?`AJ`qrR{xsT@-)l?a301$FKCtUE@%xYdkrG z;_=H&tzOy;D1)%AZ%Tjje8g{m6#sE!QSto4qM`&Ws%X4N+#zlqZ5&3Ii6yZqSta%S z(zO$Ag~YkILi6k)*R#U7V(F>K2`z?QSSQuCq#E{n}CBFh?+LV_Oaz#v74;>2@yZVu+KfhkjgFoIV3&$nHLB zqJ*7J+U1jL+s1EGh)6yX@PmpV_*-b#$>nKovizMA@^3|0Kva*u zsNpKR{_?~#($mhdzafHwrEWxqcy-6+qf68?$L8jx+YFe1)ma6{8|WumAG&h8Umq|7 zcfM{Efg4wt^R?DGZ@~k?%Fq{b2ck5rHK~~@zu;}TaI|6w1~`!0fd|^)$UBm#t)n$D zz^y54?Gq?-2Ai$N6^C!l+_}$x;vt*p`naF&=TQk&PfXIaBaqils(X|Bw*;4aj5yN` zwQ>K0+Ac|%?FaJ#`+eXvS$?bWJ7cGCev|Fo!AVK?*0|~p_rW&mCWi{iL{m6=m6&{T z+ho{HB#t3{DjWMo@>UPfpc|Dn9v z{knzz&9x|!Ki3!K{WTo$#WfloU+y#u)+)p+l$!Smu3t#D#kWIBj0JVLUyQO@{A^R@ z*P0Bi&gsViploajBeI^8Zvko7Jv(yS%F9BLy#xR|?HVY_}65~@NQk94YBB1CzR2>?=6O@4*q+(zFEp!MO2*H)F7so;LLqy0M9 zCA%Zr1@@AxOXEIQDCODCVh58*4#Zf$GbM_Fg|06!RKxU`*hCou)WYc;s!AEv!0~qC 
ztKyrS&Up*KJ(N`yu7|a*9&>!+HL_Q7vNOinLa8VquEt(>{1R{w`CN(CAMI8yxsWdf zLs>I5I-_L?np#V}#ZLu?_+=j{TTHu9@$#@cl(yqU(-8ka>2r2!EmSpY7AGtyYxliloFo{ z`SC?%@=Vomm3e-`i2E$-M37lDfOQmT90_waXsgzC^_RrgUcZre{PQ#izqP zL}Xhjo_DK@u1#7D4*>wW9(+gN@qk8o3>X}0CGzff7Y@vFjoLF8s8nIaSafytO;Cu> zr;y5|vVYD$Iuk^-4lMn~-!D`Sd zAJak$Az^t%@1)*5z?~Q# zQ1y{P?7iF~OAf1<5WYGra}}PDk@2WLxLO@ychUlbT)4JfRApNb4Xwcu9pXc5^j4f(ou}ie_wj zDyFhB+Dmi4pFIOQf5Al(ctuVEBo>X#vED^+O>cL~znYpr*&q$c0^Nyg;si3G;5xCp zIrwdUJ%j$>%ol;`&Y>q`6Paap|ybDj^ZM#cAMMpYnSv=e~P|Hk>|-d%vF2Aun@flP5p6U_s<`uWD8!8naJ~pxb>*+~t2@;>| zd7f4f%NUiX+nb*)wQ^VZc zY6Awmi=9#o{9j)SVj50c`j)?ZrF9(;U}ahVSr!*wY*QqL2g~=YLlNWG^Lw^`y%fC$ z9HyoA=n9g0?B=>#@A-f-f)aqOehUncEwzTR$E%riSboLq1mH;H*0lAW;ap+rycFf! zd|tEsBhP`!9Ecr60I`EtD(gpu-tVlhKl|vZU3vchYRRRUb zKnVD77*@2wJv7U1G8J>rLVq!CY@-ki58eXvcMiGO7?Hh13fT`lP?~iNfy1=B;M)3o zQf+3udKPr2B+w9CPmp=zVQoLL;s@e#%jMR}OH&pTkN2?01ju};+mWyFZ>>LK;L&l#yERo2d68fvah2?2N< zv9qpc>aI0dk6y#jOQ&-};76rW+G(6d+Eh1lE9a)QZ(#Cya=~SZ-7>^8d1NnRw$aew zkA}D3J2Ys#G0`tYh!h{G$f}VQ5OQNQt-NJOy)3Zu9qAwnSH1#mT!=dS7AUh>D4vic zdGHyitigM8>!uDlAG*|0JEvUWRaXX#hpo|#Ym|peG}5%L8+y?VsHWzI37&qrlVwpQ zltC(FditzWdZ~b~>JyOiAKIYhm2{HX*WJ@fe|22|Zj$ghJ+Ls_@Y0+-|7qWMCT@^? z8L8sZP+BOgp`-KvaQ4+@0)w>D4Ks9y zATa{c-3;B*`JK`G_PyhIoZa}M>9Fi=JwX1 zpt$$kB2{u%r->AA)D-R@_;x`+my_#VCAg6S_;pF9e_{b_1whJiArI(U0`<}4-QgSv!!o%VSzrW_C>E;K=5F{l-=pvg#Ofjxt%GJ zrH}qJz7_D`^Xfd|HZR#6uq~-OICNJa7~Uz=D9jT+WCnR3_ht7NN*ZK`tq#yh6($7o zwwo^ZiV&GgfPq8U7mHkB12Q44ojFMf#2r6>-rQA=^HdXr*v%A0cY(i06I6y zvN$yC+vY_12OS%mg_u&)P5$bY4+U6kHqE1tJ=}ZF`fF{W*ESW7R*V(998~o=JKG8( zH^g$t_S0?Z>qFPJdj|UM_sw{lX^g&;28$TD*l|cURs8Ti?Mt-CjW^=&vLO|?w{`Dm z1>02`EHSl9WiuH#H(K@J3-{CuAhV}`^HPiAAG&dp=KlUsP~Y)hNG6<$;wRq*#GROm zdw#1dAQDwIC_Qi|ixwURvck%6q1HZ1%ksBw%PAp0x&cW&bf9#v`@vLCj7cMvti)Db z_8yY!Psk^7BkDa%3j`@|r%tWxTUo+eE*4}t%n*{P9=V~u%!_lPZg+GIXkIoIT{V5D zWbQfGaFgVQ-9>M0qKf48dljeGuht_{4C}!`+l2Kk>)p#d9YdXLJ}d=VO~o{1AIB3zdhLW=GAL5{6{s@ zp7}AX>RpBb$q$${5%=3jHhsS@qzDYwgmUeB9l2wgD-&ZLV&HhHTvP&B@vBo;Xudi- z)ZzdF3+{FJMV9=y+{_ZRM9`>bdv7*Lg!8gUJ~GBeKC&w|Yp3)pJa_bLyE`gTTh9`^ z)S8M(M8rC4FjtjlqFM?U$Z+d?JWkQI>M>p3dj+He;@uAuxd08)vp)j(_*6)fX?MxG`@}myEyV5`Q}6^ntpv~&sh4QF$PTftTupohked3Q zqT~-RgH)ksBo!{i-Eg<|h-CYd*b%-9UT`-y@uViUFvET&7Eu#_YvK6xn%s*)jX~Rb zka#Fzw<+;Gw0f&}c7^(q3ibjIYke<&Bw&!=8fM|%YE@nV++v8;?BtCY+XiDfjLC2V z$F|JV?Z@+8Ndk)H;%u94;~7Aa2AG4?Qa;g#yWNpVJufHyyK@q+c}VFNH}%BKoAk;4 z&xM`yR@Qf3<6~8h;)w8$K4P+cmXG{^C~b6}BK%G#s*ewquZQ_}Bt^W1=cb<&eaj7I zU89V{UvvZfx1W2Y3_O-WxG1Ssb1sWU?StikJ!6cvu}GmnBg zDO;B_>2=nb_0gXP?CzIgXP?=3tC~%kGj6};zl~V(ee&XIgkf{oeMHFEB9@G90Af;6 zoB^N5%SSG&PqgM-A?T3taxm*NlISW>L;X%&5qR*H)94`J^hbGm2Z+D!eMx`ypLcSq$4@g-;;Hl#XJf;TG zhzwVeac%&pIss4IPIKvHs_h|zHNlOYBQuvdZIBEhvBIMf(cne8#T$mij`CS3)0M%! 
zWswL8^yfg^bSZ#m`HMSuexSuj+j6Jy$m@;2veyRp1e>86Gg@UaD59k@Mxx!i@BOrS z4v1Ya-}x@oMdIrF%;W1bz841;m{QyN$@{{Weq0wo)Uh}+>Zl+f+mh|ttJ>699oI6P zf_U5=9Setak+?j5(SQp{!?DyhxGQMsVn6-v%7iQ`^B6y~ayiA>*R5DM14Q{{YCM2; zj~6gC(8R@Q|H9o{?u7|TBL(8?PjLHz&3h`X&097xaqE_>a|ab>?q#2H7ynWA6Wj;W z%%}u0T*fF74FJ@Q&huhnZB=+U(!m?Mmb*vsp@@VIZep32KP1E8c-f$TYbnAIk}{D2 zdz4W(4N^E9u!fTA8@-%pK_-;QxRMya2+E^IfZCPLHL*Akepy&UCrba{pm zUUX8V(_X~@02JY`c=_vZAba<$w=4iQdo%!b*0c%&`H=##3(AKyv&Xy2ID9WkBi>vP zWmMe_jhCdLV7^1hYOmLJwiB?P&`F$~y6C!hfg>^i?M|UD?3l1MPru0(S!N_Q?RkYC z&$r%P!h7_*?KYB3HePHP%#9plg~=zm>*T83sID~*EOC4XUIoFX#)qUvdNVN`S8*bPHIw-JpC>Nd@=`M?-$Bl|Qe529 zW`)!IQ5n~jwYGO0AopY!F)3VCUOp#hdGz|(gH;zv0Y-x&t1=T}Pu5DHTc9s_WE)&> z8OWvTGVo?qYmuPhI}aL9NefOE?iWTKq=F9v?fFxkH~rRE&Ndk&^Catby@tURi1XPm ziW%;`UJ5Y)s%W%GcVcntXl>FPeS!OTQWN*pJ{}Zn@;d|R7smIdi-Bs0Zx$oT4S!@8 zx1=rLLZkY6FDUg&^I2m7r=~yFr$0gjz5^*}=$mU?mdU~r0IJ_@0EpfhLr%`8DZ&<2 z+`XO?XVyR`3UohLFq?R>Zws8svLo(y2QuYbIe+Beq;-9@_{#as=+mN$V`G{Omt7;n z-KA^zO(Z<|Z~IhJ2}U3b5baBwP-8 zz|7P>B%rOGSDrFg)A!S8smC|JBhtdEYiY!?j3azHX*XoW%O&_jt$Jw<5ktDHtPKu$ z=Iw2;jiwacK7Cp(+%Jg_wG)o*#hX;aAtAA69zWjTK(6JtUqW6cK0_QTExR@HR^6 z*5?Op+gUY)@HcTjmqvd7?70q(yCsmia?o-chW&Q_z8as#P}|V=TPZlz@ELhTld&HhQkgrmGobH3=pQqK?Qr;z-RD8*0^>#|41Ve({ z`61`x^Z3k3@5I;bn>@0K8d}hP1Lj>%z}=N>`SDGaLUD>SB9yJ?jjk37#|@MEuoOSr z1b^?T^tCj%#9@{%x}J&aqZ1Ly?ysCzffo=UBxO}$>~mhu-3g9x(n>u&XQ>aQ&Nxr5Cph?)b2G8?_z-Vfbu`NMYn`?=-Yy#n0R`WT zYPlaK7el2+#?QAKEy!lM;1GlcGOfDmky3fywim&$%2#%J0MPye2^aRDXNlB^wD>s* zg!o?6gXmj00tHYYiuia7@o77#v6*>DyJNKY)1{YahNGffSnW$wDFJjZk;#Fg`GWiZ zSfCX8pJ$05lP|68_@=}lgIwy5-uBZ>33kKg$Ocgv10|HrH|ODYsmTn?OFtnjfWoHLmOa2<1<6i0k0$*L?rX82mT|KD z>az9$s3NQ_c62z?V8oA8okCT6_6~e21H9j>a!o&DFS5(-CR4ux?!RCAqfz#uMDLFp zofU^$JDdUcGxW3X$jvvZ+1--a34dc5;H)&V3%GvXK_)_I@nSwL_`MAU4m&4$VC*q6Pe!(K%r?ON4>or1KOUnX~<$x4quV?apKO+}03{tV}%uP0o;SW!T6S z<@7 zr@{k6=+F7P0+3qBfM1}!GI?9V%JDxFK z)-5D|yXCX2`?E(2NQtb(5{)xeY zaiAZ1)%8!%mG`Tt0$?}vjz1{k@NbH^*7`DnvdzxoFNshk)fB@3DhQ<)`t$mc`9^k! zUn}|h)r23-EAm~+HK3sQx<3+Q=F!sg^pYgXXQn^J$ODCQGi~z{fCn1$%0gA53rz%; z12xq_sn3+Q%a4n{J^Dw~C|<#5hUPQ94?w{8B_$qmn`G( zZ@EmM{_DOUz#0#I`p@40T~L0O)B}%yj@h~fu)r~06WZN>Be7AwL|<}$4B$Zc48E3f zs$-m8>g<~bx~#J`o>!{?@Q9R-IK_kqY&I?lgoG!tI52p)g(YhH1|8{_qom*6^y5JJYslEZl< z2XVMSPM^nt#8`!K+#{9xw=pbwf);Zp2a71wp3abaDsUHNz7mU*3>*9wdjL}>(;g`! 
z9sA|mFEX!+1MA^dE?HMD&12~L2T;smquN#en@%GOBZD@VL{5;9$itS`J1DpdA}m0t zLU&xv|K_~uc#ZzZ_O{Tqb5{JZ{cR_$8vFDZl?9u?uMeGp7B22f4N}ie9RlZX<6M%YTDAN7#Y4^E5v&ofLlyO83-HqD>K_4UN zQ97P77UM9Vs1Fz%a=*2mzLr}@fB-NpP#`Y9e|)Vl*gfkyuXVlwcBAH-IKSXM7+i*G zy7#t+$%xIlj0<8BF-%1u(=K#L6~ixI@4=n7^>e|nw*;(7la%Hv4-9{jjMDcRfLex4 zU=~5BQSuUP3_zVhfjKe76u%!VCjuG#=-2oBmpfn5M(zS|or-lL?M$QK{946$>@|6H)2+V@Q2` zMiuXKUCpPL+`m1g0$_8rQLs5qSk@$zBDQnr9&HKTr-Xhb8WPWSDr}<2 z1e~ORUKNA0j(gWLPvS+ke`#PmKm)txmba{H=X~lowART45Pnr^)a+T~{0N|%S{d=< zS9=_m>~&oAk{MsZ5Lf89E!bA~{r?GQa8jybiACkp5s zsb=AaCmP47Oh7eoYQPnA??5Cm4de$fj6OPmv~z+jQ1u}O zq|Z4`{W0b5$0q=$h3dWeg1pVEcqY(98oMAyi|VITKqrQ^@Br9>_jPe+p)iX~cA1Cr zHrHD2+i0QngH)J?6RXwXUy%ELN<9&p?QtC=?fq#wC^DgIu5yccwU`!P zQp9eu&h5PW~CjptRQxd^nLfpZNPJke8*WQlJP*RWzlAQE=n3;d=>Y7Qf zjhvf_SrEiE8anehwms$i644Flx>a9<_d5*Jbm*<1V;@@PIW{yCag2es%(HOfBm z@x@q-gBlqv*wM3VOMA1_V&B6O3vQ{tTX(w_1vv88AK!iaowDA<4cfU?Bfh7*RP*?k zKT_r$N`kJ?&1oqlsbkesYTDo%QbpAsvSMtK(~76hG&Uzy^)nfIY13jZ#Kl56cn}>B zGyOo%-vA_|bT8F1;N!KWt?&(lB43(Dc@nC}{`5BN7ZG79*aT@K@rfkW8{aHTY*BM5 zp=*+YSW^E%ir|Rn#DkGXlR&s zt`~WW?svDBa9$2pkoMm8czZgu`C=V)?14ck1o~gpG6z&tCv-6{z3i8jVR;_WT47`4x}* z_v0K@73zR-VL^l!A!U`f^Iu8zMygG2WWw$bc|ko<)X z_j?)sa=}Ied<1g6rya0RfBpFnzD>~lCs+OX0pMe=S%K*+??JYI&rn~^$7n1RGd1!b z()Rlao}rUV03Z2sGbZvkyyPrD1_M;j4CF%p`+NR+Z#L+{{aGY84FAXpezdU!nKf7)F!^6U#0Fm1_$M>70|Gsdh1}LbI-sb+Bf=`l~R!f(0 z$sQzIG&WN;yJU#L)YeD&rD0Z~w-JAx%s;Ltqky>yM38L9|2em&`{+EjQ$S*~=Ehq7 ztEjgf$;@$jjzcm82qvtCe+CnN9+!R@*;N9-`Z!ZOp9TQVe*O7b4b~$OR}yC$noWyT zAsy}*mW!bO?{7LUHKUlHU)Z$R-d+w|8)BMJ*DMr+6?~Xw{_}Q!nadv&@TtZB1a>;Y zUA*|MQZf>o0BkQZ3!P#o`M@<*kifgp*#EV39|7{*!NIouX$GKZ2WWe4+Y`j+JA=iV z?VnEx5ZH4@kbkex@4pbC`wtgBo`6JdF0^X@zB9QLaOdlov5{hb-uW*g z{l}+f34pCmp}9-pkJW1`$7*e(r{t$2+~X3a>Z7$8?j6Cr^lG?;eOpPNV?K)k%JG)H z_+QYTzwh~Y77Lh$kRidzpN-jOuld#2QUrLfRF;PpMI|A2GxB#p4&L{^P9N|tB&Xw7x$q;t+_|fVH;|78%ci(-hWtn zKR^$;e}B2B{KriwxzM-n1AEt1PlRq3xPq?7_&1;Zhl>(^bf{sJH(HKiMhVB1Cw#5ggV21ui|0iA>Fb(zu9{~yG{IDeAa$vhtS^6L1 zcMU5;!N-WtQCRR#7Z3UKS<_abR{F;^$kOAJQ%YxWj_Ovj(HJ9mjPZZ)6efzNnw}$R zqI~qx=@pWj15&-zYogB94qrsy?6Mi4>&+GS094^?eDo2UX~$@Ra?ivQ2eV4g7iNrY za0MT$|Gq&yp5+3Tz1}qERlFssB+ff)T-!iY@fk=dp0374(- zahIB&=$#)wh9*x7b#TG8bDEF%-5xn_O^WtW-`HKmkKMJZ&z{}h&d$vzKd;`j-IzRJ zQ%DwyoSmfvPVIPXT-HJ8CqJ6o=|i={i~mnv{vndHjwmU9v}fC>=UabvXlXH$r>=Ud z(=-A8jrXJQ4`z^F&E%Wa?M9stLD#+6wQ|dkymooHbw+LNyB6&LNX!0)2T&X~TK&UT zbs$BkCLIh*%UT$8atd-12|8RGhC>_%^Rz^TIL{BxPwpJQTA}B4KTOKnCQ7k=izD*T z$7afX{&+XKs~V3s#JKNm@q60P2Ar_=unTnOZMw|Fdz~b24Ml|qbp^`(h)b7dliKRN z_WPch-06mD8m5F?286g$94hV%R z>DGVHt-tm}A0AwQA&twBfg#-ql@e-dF0l5(%Xn%p|=O1?e4`IS5{Uuoma4h`ex{R{fh-{4?dd@xz?$| z_20A9Wd~qU0_-Z)XA|2WTVaqHznh1W3ljIff9eZ`wt*XMVr`-(7X1kmT;T_i@(Gq1 zPTxy44Ic5jbf_WQDWEiFJxN){W?MPR9ncS8meRT+$m3FyIS>sLB7m}ui{zVdVIDMs zkA!B{!t?|xCp>ZrK7T%TTV+x!)X;Od9PTE~(a0^x0+sto$S2I_yBcI`w7rQi8hh80 z@0=!n7-fgw*m4pY7t7{{?0FDCN^Mkacu~{Yx)%hCw%hH05W{wMtHyE5XJbs%?!1*) zF24Gi$x~M!9)a?<=%M_NX-P*Ef^UnLyz`UR`!>1MiuQ9lEI#<$FhH;okrQr~8mqwM)KSCw4xf|1EhLf#!9f3g$^d+yIx1h;spwW8FUs5KPJ9erX-*-fyf}3n&oX>R+V{4{1J1GrZAsz(D z#jPUu!lw~Q4m&^D`!qpX{IXU`tA2H)P{zU%5riC=M>yhV_4pMBv64Tvx&}A9!{u1( z*-q|JdXAYHd35?lvtJJpji2?cxo;u6FAJjDXTYMGXT=y77@2a-byI&zAo$1=8FQqS zgabaT2YBG^YndO8GZY;N= z>B~7kOq}~(3pYauSgmUEcqBnrC(GO(3?{WTL&aBjCGJwBktgVHPgeW2wu*hB_M6fI z>D)Hv8YYCeY6lt0#`(j0hf%-4@Ps7TC?ZLJ;Z^C_1BtIo^%!eJ(q={;aj)av%e3k zEu#I`oz>+DT6PH1^$wx&<+=Fn2B4SE)RStfvF$}^gEWf=XDUy#%^pGWS|#LF^deN?!CSg@Z?Nbi zQd_uS3h?68VlV>ED?`8*;&HNPzSimI7l=)BetJ*U^?*b@Hm2t=1R_}kVSeC*lzH%a zWd}Ej*Y38jFIvu}m^#7zDC^lr^S(kW&^_?Wb-Oq{P`ZjYi&@A{$;GaK-a)pP*j)z)X69rGg+${cr`+` 
z7v=j;RBSeV!U@a^4B^y!Vq0MkYMZMHR;4Vqw6q*cBX&DJmA1d}tfmp)sbc#|?Yx!2z@r6)g1wG+s7${l zT#rv1A2wC2Su=i7-V7#`cZj0?Q zGS=mC4Ov6Wl?aWO%+7Su?Dr6vRch87CD$=k% zYJ5!n_0uEe2#7Wk%GUUH0v9*y#|$VX8~6RNeT8Om<@W|X(x`x^c((OZq(rQ`oaN=^ zG_>>c{tj}ub*s3D)q3bbJtAl`Cx=0&G8hh#HC|i9qQgpof>Zb`SsS$W3cDKyf&OL& z&m${qasxZ{!|@R8A+=_`4GDH-4Pd|ukxORR=cvdHMjCieh0W1?1v?w_SD2bJLNi|* zz)j+FZDa%-Ocf=o?WJ4jGx>*+=Hj4i+6YydC~HqLa^R3@!iwhJX0PqcXE$l~k@u1# zb9_R`eZem>AbUuRuhDy^@zr#hFWjKO1q&wpL!mZ37D5vOxk2U)#+8x=HHx@!Qg4aP z|4^m~0nzXkXrnP}6n+6*?QKa`K{pT9jm~<6z?W0nyW-6uE2BmJAAH;3%2rlMQmGe= zMy>rcslmdf{-))E-Jbv9{PoMXwR0O?fe7&-t^7VTHy?7_Qj(ka1Vr;`sCz7`Ymc8@ ze(wicg&JFKUD4UDfLNPYx^?Y{98%?0(TQEA`Gv40)@Ws~z^kb|e-#4>WW}p71saeh z>($MZAIo1x4A|NVikIR~WBYmzQax|l*}=fITlC>g$->suOd2k_xryYkq-jg7P{@I) zRQP#QEujT};`$)6!2d>ZY#H`G}y|G6d3fAB6X0n!Oy?y|w37 zYl4e#ewRTm?1nBT`H*oKg3Sd6%>-2ygBs_C{5k{w)(GJ*?#d1V^b>pkanGr{xQIQQ z{SM@}_g#A_-X{bRVX{WB z&|-mT1|b}TQ{GDT;wC5cIvsnXzm;QjvK-#{a&RTXl^#LL<%azwz=Od#>iv74W(rTB zP<)G8GGb%?&h3-F#+lkKK6mex2VcyC&D^*B4uCx@NtUwSs9I+^;utcZn2;E)a$N6A z@mXn(I$pruC@e zUhnMg_LU^?JAD3dS<`-{FYe#oHMdb6`jOL9`1XTuk-|bVoDdylQoWitHw@&GSkh9z z?C8&Iby&=jAb75#yKsg=cVqvnD~J8Pk8B9!5l(g#4V0}8-bfD_BiF97cyPnHfGP$1e3g48 zk9>PY+6Q%*)rI(tu^jpJIAz5WHoa3^+SBntWqzkYt{7EqGp&@k#DnoJxSYISvWkvY zASn=Vvb_I91%#kq+n3P1OV0n|R#isWy<8#4vcl%hnQbG3I-OifaGXx1Y++5e0|9~? z=Hh7x+cRioHKbX)s%#x)w4z&cF2! zjg9%XR^9tGLTq!qe5a@OqQv&yu)Zbm~RcUu{X_H;R?##i|k@*z5Owj>@RDHkmUu`3NIL4qwS+ z$L^0o*NFx2V1Uz-QGc-!pZ#7{Os~DHH0Wq!hzJv#{IvMSlP!eeu1Pq(Ts!+-m3gT~ z!mdUeywI^${$5*zuEW`w+4^U#f#o)l6cy+u)VknkCN2RZ>|Ggv zk&UtignjozkUQ%m;)LN0lXb4wF>yBV$2f)CyGf;GZ*W@e3oe1Ax@4mI@w*7d3v~pG zlN4;r;8t?5Bs{}yRz+#h_T|yFB{qHEM`NXi9iN=Ja62R;l3Vu2LR$l0Tv8VvT-$AH z%wUuB5VSxq(yeE0wR4B5iQfQRnF+IA!d`R=$$XL6n+eRhr59vjRXmO+#(_p-Hwg&G zP@Y&DwSte>fYW-_=PR8P${}+HpE=VHh(Yszf(7fLyk{+~GOLt8Kn?qql@Sr((5rm{ zQ{l%lsRBcf%p-Ms9Dv+C}+VqFiQ%n~B z^6n7s+v?Jw8+&(pd7gAav>N8J4PrZ1vgA`Z%GD6a^_bmsc_eVo#(S-YzWM+$toY{Q zl&kK0PKq?^Rf!XRo*I|8^4A)#Z0y1kN5r;_of}}$83Ps_)h~+{(Ys|E$h{i$mL|N7_C|7hZ_`kR~Y+^=nK2g z2fm7wn2M^jUVK!%$pjx?<95jWTlBq<0hL`9&}Jz);4+Ui8iJu&ih!dfREi z9H8o3CFTRTLLuL#nzhu5Vc@;Um62>F&Ou-%7z){oYbM9-Y;f;wVwy@+1(Hbs|cnyV20{gUXw2!ipmhhCBv2b&kd2|dW+1ZKM8yEF6 z_h+;_y)y!RS;#wGpHp5z#?ac}GO@)_48T%@_cWei8x=_fU(7x)kfJ9HNiyVny}HMR zJTh-aKWRNhOos(twvL(n=V$^4X{1rTjLmT6*AI7 z?bh%*#dxUp6sulC*df`p5>D)h0e6x5uxDw6+}1!g(Ri6f@W+psuLd*XbLG<|dJRY4 zZlz+TUE%%;7kard&R*?y!9u+C+c-H*0Ir4C<3MR02u1S_e` z5fc$bid;=*oxtB(33_jVXj?`Hqx=M*^I(Z52*)JF6v4uMYGmnT0Zn4;i%V3v-W(m} zQE`@=h)ZPZ#U&tk2bn4KFBa#2d4MJv`7or|lon#~@ad^T-LzIBlR@DKOi$M8%rexT z*RBgxW;LWCgxq0ngMaC$OM7l%q=nA=W)Y)7?~iJaxiV*u}S1SU9>I%!)vIdjrtN0_{TSDG5e= z=~C`9jzH|k_8Z4qFe8*Br0hr!7(V+dn4a?SZ^L`R9O@th=7sZig9&aI(>3B|-EtYo z!ILlgn!#KV^m0iSH7POK16c}B$aJ>ecx=k+Hh8qDWn_6mZw#Z4s7Y^d{yf*ji_c$P z$Lw2A_R3;nuP>A1V5!VGQ(if-F_2F%`ON~2l{L=(atEF~d#qo*_!Wp%wODx=pA&sc zEZoR`3`KSpU9I~rv@0?lEaJ#UXcp^<9&K&EO8cY<(g{Pqar@(%&h`o}V)#R)k$?MB zx54&~cn*3Ki*E7I#=e47pk2V7{mmZ{_IDVAk%c-}EwfZ#xGEXbq3c2yoUHmaAeB#% z-#u2JX{fdBeS{eVy%{8xw>H=uCAV!HQs;yASF2A%B3iFv+y}@#Kj$-yQ znKc%2#}zHtI{bhpdY|>yGuZB}hHF4CltHq$C;MfASnkcEEpaO6q9@|4*+Y>r-+64s zgUaKtiP$=ju6TI~3H64(jj@X;a&`lBXhDl4w#GfxECbNq(Ahx_sYPZ7(Yi(n08I8DydNGEvv`rU=pF$WQ!fn;Yv0xV73_ zS%FN}ED&!-_ej{2kJ>`xxbQTgy@?#xAzd-hT1E+wEZTWv<%~34WUK_^xl-F+#iqd1 z6+fy3?GN2Fw4n=4&0iAEa!*VgJxR`F$0kzZSBx}CmURm+?y3Ea)beS6hUP_<|v|x20SH2Cd zgPn+&p5=%_80B zwTH#|fnDoxpl%~a>A8Ho+w1vO={KvxqC#r=BbQZpeD;euXU~ErL_1F_Kr1ubdUccV>Rg7* zAc)r7e5UNmcwx2#M*ck{$)xKKWM3GJ2?`k;T)x_z3_}g$Ft)2qNjN2XXU8=0~Tv$kiRMKV|tXC8>SzgPJH3?bx8cxC#Sdr#u zU@=T^eTFByUSp!OJb(yvPBH^J(O|*DMNs8vmEOF0%Xh56l~lDl6HLx^%5h$|OEkd8 
z_4W{mzq*vG5~)^0+%^Hvf;Ol(;iO1l+W#{RTUAd{6mG7UEO|2CeyEDIQ z@HmD;4!{jvE0f}(4Six^2>Z^@qG#N%rfO*6{-)95xi*bg4L3wZ$j$R;1{@m?E!+oS zlhX3&h#%}-EY*I{d&LdoXo_n2AJlecddp4)-4=N@3QqN-c9oTt zWtdOILr~FRy9IjVi-^Bu?Bm!P%vd~7YJn3N8-p&Q7r>jy#%%OHlzJ5C0n#jm4+k=hdG*E?Hwh-j>QQcr# z?6`G}0V9-5w9>PErpg{kqFwPSPb>y8!)MT~_fF($-1o^Yvhr(qifDR3ZwTAX$DX)7 zuxYIcDZuWK=-+Q>__g)yp|+k670!C0Mq{;K1!z*CMtn5p?jz;Euw@;`9^eT5%Wyhs zI6Zj6u70$NSGV3?Jr}UK4HFq9HdnqDF-hpj-agFNET(2)kkSerEeg$6VcfHMTr^YV zdtBv0rQpDfYbPwMrD8r;r)oD(ZZloyJ5jmKkX!u&iJ(v6P#U=b5u1I`%nS%<*LZlu z8#?5(-e`*w1t^3#LP@t5F2o$pmK0I zjxv~4K<@poYNktpT&L-eE?s2GAonh*nrVms{eB5GzErhS^hY?ONeLa0>(4wNdU}BK z)O8-?VZo>ili-dwQ9-(VUrKDc?axck4E8rg9`4D?4^ONo^M6{N(BmYQC|KVatp>y{ z88I^8o((?S$L*P1P6zksUZ!|BhCmd_r@nb|hakTpw>vd~;l;l0NwfbOUXb8X#xPF` zs=9fSXBJ&(+a~te=Cg`hja_gB0fP1E-8!s4+`~VU7$-cuPm#_O^ePOV`WcZqox9(p7yOtYD@6G`v zr*M~kuci0j`hQxqA54+LcZ1sU9IfY?DbzgH)q5jKzt4S#p5WSf+ zEGY>~_Rp1G03LsWTtyKN;cazt$4~fCQ4o^I;MLpDqSK;kYMdmE-@6Ff1l^a!)N*xk z(dPpZFP3~zDI?CZJ5(OVD5WkPB@>3AKk>}DlD!$I6Am>`*sU}(_Uh_ zc~cw|NzR9Hk5o_=wh==Nb;>3{m~$9~#{fxy+1~=p_JP$K?tL#b3ZxZUCEk%e_@D1( z-DDV{(W>ddMBmZ=xtewuThpNu`+mHY9!0ayyiIWKId#>e78i(E*Ent$N-YAS^5pmn z+OcWp?TsntXVQLpx$_Gjxs1_^^l&7PDo-iTzur^E&MdHqm3hfzl`RM36aZ*}kIS6# z0yrxHhpfy*z+$j@w?TOWSae1oLzoTly9O8TM*u8Z_O*bv8sKr(B|5NGQ0@-&k8cQ6 zLG*zV|5#rjpKo?J`3#_CwY`adV`{PL)>L%s-e%A(PU~TfjCzHV&ODts1(!@cW=_vs zvibRM2JGNcH{!kJ-aADlR``hncNn3`5u@pBKniBxm&8?^o$k%CJjqB^YL#UL70SRsDLG)-#m#jF0Za%m8PV3bCL_^ybENdCCs zHwKhQh>vp=i^4YI7uJ{*k-+>Gs=t6eEhvg4#n(taWYIFuW=b+qDV8`u557A(#18@| zljtV*W(Z&cynN5GiVN^7Rvi(_Lb!92FPc2cXp9%|L3C=N1f~V=54E!cM#ptd-=Sok zW=UcDK7(U8CYt0{oI(dJ)~yb4KE2!OeDnRxrZz#Zyb|!!*lxF-*6m~Hd6A*n*Veg9pnPuIr17x;KyQ}#* znu3n%*y(d2Wsr_Y`oV6!`w=cl!cfh}_INRX!2$Yr0Q3heG`)SmLIZe=iwk)9>&^a3 z4DJX}o4q|0Sd{pZcYj6*nq+Ued;mAR-o-QRNr(h;PhvpIcc*F^uqh}g5^1V|=3cQu zOBa~fMu%a+@6BP=5NyD0@ojF4Q8N>K7!&oTd-)SM3+y!fE#vRR?x99_F+%w;e;$5# z;s4?UN;EdAS!tt_mA-|Q$#Nn*JUpdB3y56mX(r=E`N3;$t3CFu$?9~1-XvlD+*Ddw z!gUM5o@*#Ry*B2@2B_#}zjgFTiJ#;)2Q>L#$sJDPF2sa4!J7J_eVn&7m6%baBP)Y;@FeQf})}Il4pSb%wPOv0qvxf~ju!o;NYt@^y>TLsd zzMiezQP9q@+u_K|nYH(%mK9NORZ0U%lV^)xF>ShjZK>IBTz&dFGi}Gqa%t(7q&a zZVFSVMm|!QIju_d!aoFbK^n+5l1x6#j0GHVv0zi)lLCZ; z7pCw33PQK4w_|*E8X9v7h7!9o5Ggb{uJE5w@qXTd(TVlCN}HVOc)Z$Y@7g}=3FC15nm3ew%inNzSha|$a}XPWmGb?BIWqmCUhbdA<-p&s9>^y-y6#HMKdrkfHFNtlR-6{eOj|+uF96ua2h6LN?>2xCfYaX@VYm(@w|KL*oukFTnz$qWib5bR@#M z5cLiP*mvOS1og>00-jPd!x3AFmSI468z`C$c!Mq+%IYL@ygG7~s_iWiFU~eMx?)IG zAfV&HS{VGeO@+V!Bpn_!`FvA`SOZ~Md+gnI;&hh5VDew6%^fs%G&sPu^w~`*VcHUf zz3$~#`1S1Dlsm(=q}^B>uz+kdN_0Z_tAT zhi^Uh)8~I3{)^U`Cf>jcYajcGw-_yVs>r8;l1rM2xY+G}0>;dO^zXLdY-ZyAeC=2+ zkfP0hzFnyu;7nU~e`zv3mMiWUW9*1=>8*&LzK~vN5iH3Y)hg@#A>>zeVH*-SCLo8Y zj)|W?d4W7^apMtX&D5*WU3B zRgdZu2jHPO^(w4As^iaF^POdGH+Dh-pvVM;7*h$3b#HofLw3Osf1u1lT=|I7W6t1W zU-o)7pk&6iVrgPEht#Clc#AHW)>W^lhrbz+j~oyVjVQd}$+-n9@;`ynW||zNzyA~2 zhnWG-=R{v$9kPNeFuL%qh&e<-ZWzhiIhE)`t>4{_13!X$W#2>0^!z>!TF8OrrF0TV z|FR5Qan6(-eQT^0{s!oOnJ$-sQc76OYo7(;FK;OXnG!fe_+v|6cR3L|mS$V4{YK7+ z|8X&HM9`Sr?`>nU@f^kzy=wpmOc0)x6|P8t?=4Kl?W9zK)Te2xbBLV2*k^Ms<*4BS zO8!daR>{Fn^EVCv=zu5crF*FW#F||iE*l~Bv80&@Bib28s3U(2gJTG^KueGhe&a9j zSJC^dWJ_v?qrYYX6F2wsUvH6+kk~P+Y{S-ffpN3B0>jpa{n^QTMq9YBjo0!eaoN{L zEdNPDn;2DPyP*78=QBn*65_^B#=~S~2pJFZZ#=mDm1$gFfISB!C@ zv6jI72!T&PLAY(XExkTzMs-Co=dyt(hiN~?qC`47%alp`#E>RLd@@(FY-s(`g=$~2 z$)Za4r*=lw$(v*l!jthB{^xE$+L(`tpEUP*ssjU;E73l7jubQ@Fg-mzA);ne2%x=* ziP)_;Z3;DoA)|23o;R$^!P4t9#uCGUy0YcUX|#KX4!h1Bu!0v1dC~wK7=7$suQ}^-4Dl86Qxz|j+7#1e;6xF-LU1@;B}{?YZ^@tbt9Z2D zM|SJwRzdz6__LuZ-{Qsg`-KPD^+|AG`Ey5o>G!G`}ATwr0^O!Bqq+|cQXD}2{!&_KNEcNc^B2g`Xoj0 
[GIT binary patch data omitted: base85-encoded binary blob, not reproducible as readable text]
z40L~gaA{4u@TOjj4QU6Zi#_WaueW^8eB)EsqCc5PC&0jTR9cwy=Fwc*e-nvuS|cnD z$zVWOP&mjo4j2nqU+0ql!5oCjkF0pVB@q=gvs@E{V61aFGDb|0vmZRaqyt)UO#WNq zAc{1=izI0T8^cCRp!R@}JV1jb?8&5n?|Pu_7-sa&PYf_elq`KLP1!cP_$C*cAEp6Q z_Gt(mcVz?Busr^KB$^aV$!ulK>XG>KT#(_6UxBG7WDI{ zdBx&#g{d0uUkqwulA0pge+Rd{h937EtYw%0j1x2UB@m|Gp&O$1e|!^Se`)R3L%s5xCzy8U4^Yqk(PBaK|V9KL* z?{Di+--y9GY<8ag!ojRa1FLC%H1x~x507J$;!>0s-zPJeUy=fj^0J)VTacS+TOAeH z>P{HW&-qBCwba`KnBo*%Z8*f?axYsY7cIpj>K*^iAYVgM1CJ~rQUN@4xPT2# z<3r_EI1<-N$&%E*SuCCAb4tNuvKDYMl6ss4)pg`{1h*3lPo!P1fiqH+|YV%KfpH z$oucjD`rK_rJP(~_FTtIMSx$k+Ja;M;`jqF_-b?3^#8LmWmzrrF-w62M`)$75~t;O_`@QZH%mvZ#Yk-h6+B4(R6sP`+A!oDUu+ zz?PtDyYDmFyMuv^T@WT?{JnVo@#z#CNQ2{)pl}q$7fsA%|ExGfMSYw`&uKf ztNt{m9cVesd(`rUkF?QEt1(GE)tSamkX7#X-wo|Q#}r)m&Y0__SO;|L_3+>i0NRC@ z&`!-qBv|S0NIQcF>o1GIh5L;2h{J2bF((B-i4I4M_&+?P%g_H659yzbwJUM{)ATqV zkQm&});Vd_GnTY0zLes>re1S1TtA5yXVU@h*8kDaZq)Ps9zZ<%e0}?m zGnpcR4^=o+kv~}b&pZAifHgvNFFyZM6ai1`Uj(qb1J6q^0IO}m&YC=6-c1xd19A2f z8mQ<0!vOy;h2oICmeRo4jjQYlQF{Q)iqX7y%AXvsQ}@vxQ+JUj;EpVc0|Hpmpp!sI z94)Ww?%y&i{k8gIA{0Ce!vTG}MHOuaG)a`ev94OTp37k#n^?|6I-Oh0ZEZh*4&>V9XaDj?OD1qk0{nW&{)_*Cd}qGt2|CLyh1GX| z5au_Dk(LLZRX_E(hK5P;Z-0CtMH3JZu!~|X>o@t;lOoy)i8G$3M@JdgX?C0b>!^RA$5*C4aq z7jXz2`~6i_*IFo^xi!9}by;`6@KEr7Gf@wZa&(f9mdjisk7cy$ET=sq9s6Fv79Nw7 zciy5@&+B#ME-@$i!+V>cJtG{cYdVV^Ke^)n^6ASZM^*69LaUc#f9Y{l3)yKDk?)lcBE9C zTVAni9+w{fe<2azQMI_1cYkKBAY27NI|ECGlr%GX)C5d>{O+K@e>E{uw632P1Kfij zE!7^&FzQoujX-iAx>-MZ4+5Tltr?K-lA9iBFmhOdr(QtZ0BKqG9WHZ$x zdoy;ANF?@KdlWO^=UWl*v{1S?@ZjHS=jHcKrN3kTP;JHw4-ZeBpOn{|$uaA&wI7Y9 zE%5xWYTm@D8>kV*Bfn5=MKP)G7x)3r`A01TWb`+m*7FWiyFR%>#~@4( zI9&JR8X6l>p~!hQAT#isJDE6f#haG8x_qDMp)XblEPLzWuRh~GjGCLnMBarbhdhyg zLO**HeHUYGqs z%^3wbZzHYqbo@9Uvl!qt<0*fB(t+xNW}#-8!3|iGm^o-QVqUGKt9aq zaXzq?;LrJKsr(EJ-A1=G(PyP zazwuyZ>)+%8^mvC2zl?gJ0P;z*`Kv^YbM}f)_nu9O+OEdo0+&Mb&EXje^zD)1=i#K z>1|;l2`dST34!&;uizD9TXB``anzQ_9fXxRUO~Ka)IrOZ(e4%%>eMotKh_``tHb`G z9R{5p{CHhL6k%vTemGK9;q0^+ga-Frp}Z}4Xw5PWVxeq1>Jz@FQW49|vT=|&a()tT zZ;v~0-CPNc93^Tdq1f?!-K}IMwaW67{MoJ0FG9Cv4@y?m=kMJPRioY>-mod0x1(5Z z$hyN+^^P44(?R{4L&)hwE&AD22Mpxe9TF}p4FFS{Xt`GFAIEQl2IIUSwD#dtm}>YO zb{rVNsQFZqy<%xo_{se!&Yu)(CAaj(7@i4JFj+Und3ng*#B4YUkM+Mo$#5*u&}YzQ zLmv3v2|zVGdtmu!6L5H?;QQ*dRzn2sZ@`pGgf$oH2%?lz-^580-KUtcy^aRcqFuN` zWHHf8fLrz5!R7MloG&HNNx*nG{dBwSoZuDTC=^9ys9HdFfn5sJ`?M^XYTP)2uAvbU zfSe8NRo4}V0|onS95CDrX$qzn2@g|*zRQ7VEXCKNJn);k6P!Z_N_fA4kaioGo7#sU z_rYzF_vuQ}`s0Bh9Q?mMP;B26$TyjNo}Y&@I!KCmMaW!nY_+`nV=?7$|Ff9j_0K|i zN6}}Bn&Y#dPT^gy-TO3!E7Z*@;E%Nn|KDrJ2CrC!vFFExhd&1`k^|+Gl$2>MM^+0j zm2m#07YiSw2I1$61OPMm?jRF@dtDTaDCp^MF<49IvCi!E(Q@r7duQ>~NJDr7r&0au(x5kn~?jw2N6(swRO42KKiU$m{tNxqT zau6gg>fI9lb>=%bzQq^`MaDk0Db~d%YuFBsfHE%%-nWtfx%8g8eJ`U8!yf!A=08R| z6$lxt;w@ycnwKH9KATVpPT4n?lRhrVT;Fm#s}w|oouAq~CPvExdNn+%!EgDwR;PXn zWA*>l3*%mnXO*)O@J*`3Km+&p^-h!B(O_;{kI|fPeD0G*`V-8isg)wGyOP7%4&#V? 
zU(we{tD4>6&qnnY$jhB=+0@qKtFV>jb_YP>6zlz0+)XE*PP*)vdFNSbuBr7R`dwwZ zKl}TKb;h25sc}ANTOW7UZ4Qc;&ry-9ENd#Y>SR?+Q_5A7PkQ5+fkMPb-)^9$p=kp= znPJ-QXVUjg$iD}Xax;a5gvh5$8D&8$LrV;McsflGD{7=~1O?NSHxi7?MI!-gd;3pj z{3TaBW;u-n*L}j^-Pj%(Ky`+Q=muHp7&eSoykm)@o12@D6EkI4j^v*i*Ht~m{w{OGB>Alo>Y{OrC>E= zS;nzG1~p;Em)K~88!E==C9n*)VQ_GLgWj& z%rK%kU_}kh%^8lJ#OBpnT5gp-j{rlE5x1yMek8N)2-7R>I`yNP0K!*F{H4ImupE8- z9KA%-jw*59HsZ8i$eECxtgT5~S}WJ9ly%<#dR@o=q{-(BQ@fAlz=dOgKCGc=@hZpw zaIlm>pRG@04*>0JP&epF)?={>Uqc<#d)H8xkB?A2tkLbJqm^x1yy^M7Ur`{r+Ohr> z@0U-X(4$%ADWeaL2=vM6Tgt4wc5;MFSLrG*;LROnM!Bi|vT7*DouTjWk9z&ftjE+6 zQB2f3{E73O?Rp3cyaA}>56_#W$+Gh-{XYB&;R?V83UHXBZlifqIp_->6>cT5^iTXIu|mXSNHQntF${9=q3|S!pG#-*qa4 z-%)q`V>pkJmRQ&yzaO{mV+l41o;<0o4LI5So>AC?bEc)a>Dv_t$!4@iIT~!~=0uUx z$Mst)MNXvdoRbQ8GiaPR2g3$;bWcBF?~1ZG54_HTW+m4qfd1`^tqeeB=WI3CB8ODy z!zwbHtX1H$TTU-$)~fz0gvVrRpJz+?l;sUg0J+|H#M(;TS($)SZLci355n`Hg%Vr; zBNYWW86PMWKPxUs8^Q!a48)OjKTue$|Ce zq?!ZhZ>}n`MwwSSY$d1S`JPPslx~l#SS)pK$JXarYECnHIyNZvy-I%1tU~B zL&nT{RE5)dKZkA7hTK}Be?*PA`C0-uyaGVx#G6rE#N*5PAAPE+?-BhjAI)H6(yXFN z5v^n9&n>Ot?5}m+s1dEQ-9RXHHa@(sZ*ZSMDth_aOuaN3jIUuq{)(xif4h{T+T=|; zYA)?@QKvzGoQ{TQ?BE})Ota6ABbBior{GH4y2$%hn^Q@zZ@}n(1P>OtbG-DKU{YWa$g(7i?FV7A9VD1>y6#%F9W$WlO&_fxo&4Bsba*Wo! zLH5^d3N`7esbY0*`K{WcraO$;w~F;U!=LsDca$YqzTmc2f#LgdTQ3Bd_AR(il$ph@ zUL1IY(9@wR^W=_(^+ZLD$Y4tDWcoi^O2ldG_fqkmNEb;g3SvklVi47&uinDHK{A09Izf6~t6VU}`!P;J75Ha(6qXkS>G%#RAE zSCqFnKYZAo3Yw|hSB0Ya-9skamO{I!M?%ZRHe%LEl*zK)59KUID*K9$b3BOI%tPnO$?D?aHTi6%d$V6$&N?NAYn+201qMaZCb~$)9=%XH|CJu0sL+0%JHmmQdTu#8X;+p08c86d{Be2a?PR=V$*q?jP zF6l{9Z>fo>>-biF{7BB{$$T|RpZjEy)~N24*&O*{{V)9PZp{mJqlj<)-uGhl2J?7} z6xzK$d3%d3^`?Q2FcGWzd-tfHJ!{ExRboSPY=bHi%6QEXDbdrD^Nltz-ZEM2?7y9m z`5J|=P(Hccz*tB0+S+TbVv@*Lp&6ekhIxA0$lcO@wSGv387JfCJ2P@;`HQKHu1evP z?-O+w*-cAK+Snu<&omRuI}LJ)Y+aN5ON?z3hbN-o71#6k!V;Jd*fxHUd@2LJ3!z*y zgHq1BakIR{UcO)1mXp)l%L?i>gQ< zNv`YEsM&gPUiH`x^;(#(*}CzHo4W?>zZJ>}tOiG`=(sm=jUHBi%d~jz*=oMqR6fy=RTebGu8gvbY@1_LS!nkg@G!m5J>Q z;tKMl%G^%1W11r+j=$E265Y-Fte!HjoM;jM5{*?YNOL?*BXiyTrnYB1Xy_QA)jK_m z>eLy)vvpbgC7}Q17f0kD5_Y`q+f!t-WaT=Don1HZ#xUvFwtr8Pu`9h$ak@Q8P_%`8 zGS9{-#_3{jam(}dAQlU!I`pvM!b3pDRneK16!3q_dd;A*;uz%K&-iIc{|MC;3OJRy ze_9%kq0g{9@KxxahCb_3SvRrG=Q+^uk@A}9z0+`Zc3`*EcVc{Py_0?N3vjb0Dr{o% zYqXXqx$TDd9t`HHi%t--HJEN>L*lS<@Olxs05sZBT{bI`CV^Kf>!Bl0?cX0a?s~HO zP%nHU`Hd(2b3;wwZYcKg`699E3C{pEfnYwqf$7N#1DwO%l_lWLKL-}qFK}Ac;1zq$ zI>I++e)l4UOYi#3Kp&d}$M*Qhzlv?(qwTS`&&75WWGsf>a35A{sdvsLlz=Oc<*F&2rCx5AE(jffXjEG1b^6)vE%y)PIYXdH1mrG4y-V zL@`?_l2@(t4W&!^{rklGk(tZzf%*KpGD#E@a|{v+7_?J}sh+=Bdl&uw3t?VwboD_)68h#cXM zi%reh|Is#1T>}{-$`xAheL&f{j?bv|W&Kf#$gJbeLOU&acQ3m?m(<`g5TKWlvF25B zWDc8dbn;XZ%!Qn`Z8mE+i~?XPP6v*{>lL$h*4%V-suNBo1*W|coPaH$Hi8Ipig%%OFHl*u)hfJfPeFAZI-8oUaSga)y?^vFZRUxhzkxPMd=OEiti9Dh{*b=H!vb{#b#5WUOOxZxp4ixnMIsMIYwrYd*&uL!~8+ zpBs4%;3!$$Gh$xr(kr^-Vp6$)G zhHXoopgLU5MtE`}rd^uG>_|?go>+)~ytUUkYb@NkbvjlY-}&wXs*7~*SA5SiN_fTX zhs|lnb4I1gt_XS$Z2s*pJvG|3Ai`>I{`u{SZ0sg6aNo;BLwYRLMMe$w;3t9UfP3+gM!*RO=Q{jn?V`<*!((GIzjKDZG8Hc-+)M9 z;iZn=*kNlZKZ`t($q|+!XIGW3GKGxZgY}w?2PFS%nKc{KaL%%@?2L`a7ZRpg@}Vh zCvqam>;bb~EC&SEq(NfWYrEKd1W^K3$GC%zVC8&dlYYdTVgVW@%mzKQ&N~@%$B;Pg zbnVK_rS575t8i zH~jhXX@1F`>g!C1wqbRP`)sl9g`3vEBh9Z}Yln<=1ZPMVerW7~J!aSld`!)MS|BnB zjo7WoRG0p2dI8Ao!s4Cj@7iMui7Cp|3VKo`@A{f8dP`Rv~R@)z* zYdoS>nwg~tWLiS|V6YA(rdPt=Yg`e%9Qt_Vqrs@1n2V9qvy^7=n1FS5u9sb}j$iH@ z%=Z&nnP^a}{eG0$h0ofEh%r`VB+Pn-%3-!YI06@i#TO!s^~+h!ve?ar3Tgw0PZAtQ z5d9{)QxJ9iB;g%>7Q(0MUp zC2vdyuwar=F6*y#)3;xkkz3>+95l=Zk9a-lTOK9xC<_JG7VVw&ZS*Ur+O9a~ zj>oQui;G)z*I=*Ut*sAiQ{TX-H70Z3EzIa#GKdJa+TOKTkak2ZCmUb2?x$fMl!wTzjnfcgKD$rij&pL+ZSaFdSAvCT*AswH}1T_ 
z+nf4%hnc7BY%fY_yO&M5MCT?iI3KzY-GusyZd|gl*)Bna(d*7XOutGqfJFE0g!U_d z5I7VdK3XM*Iwe)CV%BFpI5weJ2RDH^eP67=C*ro6%RD{Z$uDv{%jYroyR4FbOB!MytgY_k)AGJ+~1+_S7mG+5JKpA)a;)VJzixfxonQl-Q%Klr}x zL5x)*f&N(Wr}o1W z^CSFRU+GJ+1!vLQBRt~@GbQW~jQi@9^4$UzWK%Siv~D*HuQ)Oo8n{RNRW?&Bbsf>PIvpb2=}}C>8vA?igpLV_4IXo zgiT)moXPWW$2_FRK`kNoM7%h30`lI#hVi@=X16mSV-qzex{ZDtd&YX8GnZvZzJ}}8 z4D<#o>HOCNGX)DZ*|*o3SUXXg_Tx|J8|F;6P>?vH{8Gd39V=a`qNVV0t2hS_yV}HC9vq(;_ zTN}zks#143Y0WmVx004mrgt7XWD5xisbK3F@xHwQ138^Pi^#-wG^%BCxy6@t0 zjup-j=g_)Z4J%i0+xZ@gEYGL9^J8_9YHs-tA3p4^$9dHQX9AcKxmz_`k(_abvm%oH z2*u8|5+yD7wR^cx6uDEFM8s-_`W^~0cC>?<-zr<{tzwP(D{!1Xh5?P~OP39iPm81d z%x*UF{DwBUu9n4mMse!fq&Ev?b-4huTTc7)D(uoGGdKsk5`9=7WvFG+L0x&J5w^PL zfUW1lG%g`<*lZ0Ks<5D%_L|bFNV}Tae5G6>mihYMX6)u;&#n36nd?rsV;%Es(`K#L z(p*n6I&i{Z9g{a;FJBgJfB$H zXOk}`)wAxW7wYUG=5g`y^~)+i_N*iRr1H7ecA9EWH&146rsV>%9z)U#Yd#10s)h_W zC#asifY?jvP73PBJ>&e_xt)>7%6S8C70a3N3?(7||3<6&y zKnaz&u8QxRmYI=9T$H+xj7=a9$Q?^9+^yB6FH}KB$7-hG_R^-5sCC_} zf{X3l#;jWB@=O|6HI56%BHH}5p(uXznKIL1gzO#T0BJ=Vh5izRrgfl_P5LKJ^q@80Bw8a4$*pE9a{5 z!@iN0dmy)PW8!x<7QGL-k~?EkQj#5&cYHQv%$c&eZKkn(*E(G`mg;CnbR>SmI7KSD z7x$5FHjaT=3SuJ5*vI{>R5SKy3%llL;@)9V+D@8S$g`C^soaOY$hL(*gtfA>taBh1 ztldn(FTK-dZ!t;GXMUb6|10;c>H%4z*O?aP+k&;J#x`Lxvhi92czR z%lqw}|%5xP`@R>BkXk=n2HtP5X%-aS+QbZO7IA^Kc?>A(47{ za9rg#$^v~c;wMnVGasLLT*DQI^)pKXHe7fJ+UZiFi}glIqL0_=dW(@f+btinoL!v8QW0vE0w^7lRLFybO7T@9>whg{*>li-s@O4MbW`C%c4A$Wyyn7B(od zEVz7-otU*sP9)H!nInANgaTs*`Yp#gieS%qb^ncl<92Ja{CGb7r{BltqMd%GptjX{ z3V7w>k3%NP5j>d_+6fv#yfWyW{USdF1k zMhdhsW>Db@{&MrM9aLAl>%q9$dU@WZdqBviUcLD0G6U;4L9vvUgKMrN%y#ZtWbW00oHQ$Ks2zf~p~A3Tpp$zQJQt_^s;4n=`mLZG@+hKYEw`+1_sj*%4_k1YZI3*9))b zY9&La8GIadpqOv1o?G$IS7EKGmS09G=TIhh*&hqWdj@JX^8+-Y>=s&_cB@}`4p#fI zX+*g9mQ^BT)^-jcaWPo?X$0Czg8fnYo$Funm~9^H-ELK}TN|V1b>3%Vn_p;RZ(qsR zj^`T~hr}&IZW3!R?G+JL`G5W#!j4gs%TGRwX3RBGg1An!RW|-O(Wcg|7A&-FT5Xlss?-iXVV|%p6&QMCYXG-X=RihQ^LtZtxejGjiqG_HU zC&LeS+= zF(-BFoh~RS#LfoHgF<8P?0oI9HS0d-orY55agltAjDWf$CppAZk!3E~i~N+;$&37n zPLS3}8zz+wWa=D*&C;}2ch~(Y&+W_4Q{4%>>#!#6>V9@?TY4}hhCGPkDzNO4CuB8_ zo6v_`OfY?nb-KZRfi(w-s}fF;&_Hn`BvJbN7YpN5IFu?GBhJYqamc~avi3c23$u6j z9%u@0;jZ`o>Z3T^V9s1^sO%+8nj>H8$CtR7rI4Ywkx--WtOJM<-^;I5sWqjaF9W_5 zCMFrJwb4nYZ+bSV^o?295{l>hl}}pCq%EXvTVCQsA>d)8VKl*m^aYBRRt;{0_6&7V zFLj!hxfLxpW@Yt<*bgmMcDe~#nqTqvl*f791UCo^hy;o<|ZRHa#&Lm##l#zs(R$ixA)PQ`E?ro!<8JU{aAX4O9 zWf1e?3Vp#qdZ*xJ&7%fw5WovcU-YoN%1I^S1TL$XxL3sQ3c) zcrIQjy=Hni39pkB6h~S*DcciVl*8DC2HOZEVE|lveZkoUsZQ=9LURxMqY5+6xop?X zWoi&X3zOQ`^&!Zml~tIpf^m7vI(+{vdR zzHbW$&F@MAszsv@Whu%yMC&< zo1IABGQw*J$Wl;9w6zV!@~Sx+AePUPZ_MVtP`3sLKnyJv)uKRg;y|8ce{eP_xlHux z?n426G}uF$AZsV=puv5hJ9|joE%|7c#z+qP>L6PrKyV}@T2u*uw~XkW$WgV<$r#Cv z0+mGX`=rl>ljFPZm=AKqvV-);&W)+85RT;$_dlhFP7rRd+vg6gPx*Bj0mT}QHz@O*8!!x2r|Q_@RBH=>EKJ)VwpX*SEG1inR}>p`OCU26UVStUVjoZM z{OCksw$wX+{KNoWkvjrS@3xob65Me~|Yisk0x z+C2=7mx|4e=XbJOe8ghEj>_s{)~pII`HkqYUED0uyw7|Hq@YeTXwg$0 zt@Pc{qg2&<0NgJA+kv#9oL#DmXjqlU3l zhk0)?Wil8saxhZv)%*VSCpU0CC?Yw8Z53TC=?yc?<6Vp4e z@62*;w=>rfB0H4vnxFzz#{GF+B8Frga}qM34xv6-!7NhMp<+P)_y+)@$} zkqQ|fggdiI$;fD^B_n@JhBY-bwCreBML((G-Y}-gul{fXG2rF@HP8<&DJ(2>@~eQr z(PFISz!kCD-WK(sD8u`zl!V_-g8}BG^mE61B_T-Q=UwfNW?B#}zvPG#{O48#ithln zd9&%p4%CZ7c03oWDJ!v*oX(}q;7vc-c7a?#sR`+z6$=da0pSzsl7lyDYq^7RedCCODMH4< zl%HRhWfOQDbEJ8S5&#P6mN!--VG_GeVz#n7=1)Q$a0|GYZl8$~h+*y>1$V0Jj+89v zhY?iS;<4^A_%9}=k}@;^h6u>P6C8p?*)LmHfQqLyuUp)l!T$r^wuO!p@Mcb(rp)gG1SGN8>nqw+C>e{cg|a^ z2l2q-oLQ-%x)QG5YUzPa6}v-(ey1xw*@83P;y^1^O{&3T)O zjqbt>26wQ~l_y5=NB0fpTQK|U$5W5l{&C%|q45D=GKzBXxt2FX1OGZFDclr%5GB2N zKZ+U$b?qk&8vGJ%{3Vf0=Zfa0?U%WiKc4AOGYAs>hb 
z7C`v@!e}nI3gH(5oD*^ZgDn=YIG8O_Z$x$4Mjz(E)Iqoo_yCoi|Mrm~m;Q0S%qchgl~KBad-z_Az#73TpnR^`9}c_ z7lz1)QpAD}^d45%>j4??DEJJa08hR%m(e_Q)#Pfys5v9dP@{4(g%a2cciTxTV&Mm%y-=78hK_%y*IIXWyfR zI0>V|)A+1bS{t7Vrn-Q0#(Z)OGN#8U?slwEtsdWtc^#dH7Dw?#;wNJ|o&TyyM=F9t zSz@>@5u&lre?!CrB7zRwkh!tSonyL2fBcYwf?ok~2?(=4J9NGn^KG{5 zP%kw^J4?VsIJe}olXa|scl2v=6-()5c*xKbF8v&@)v(sPw)&>!fMxBcderA1&-kss zv>_APWG*%T1LFJ<^~bf?w~E4a&})RNxVYF9!E1N<`R^{BtX%zHB%zn^M+D8naE`Jl zw7x&fwU6xRYB4QiM{sBZ9alPQyk7)XhZp57l{sN3*PM>laQPRoaCRAa5(oNq!HoYVOjKwh~0(1=UH zJfHi~T`?hpVihrt*n#m08M^;^n*Z}(MFB6){xzYwf2jEZWBw0te`pFoJGc$W`X7J! zPj4Lw{q4e`OY{5(^!dAcC`=GIG#clv8UFLY|EtCPMdvrppVw*p4`cqP;Q?+VCIE^5 z*~s>{FTwSb2f%sTmqk|p)p-85I2Q{XGy4_NX8#ZY|NZ}Vk^op1{W>cA-}>s{-+=%+ ztNfo3@Ncgbqy>~syZ(K_{pZ2&Q~+Nc8{#YZ-}n-LocW(^_3s({pKbMT@cf@` z_3xecKilfxNceyEX;tAwL#2kov*p4q@pU75lRBE3u}-2t9l;yG<$y*;MujR-Cq|9K z{+_w&;^#-1r&)&IWE;3;Be`))bTYc8Gp|NqRm za{p`u3veyPa#ISw&7EggIA}9deAFZ6A^Mfp-U>;?fVD@@jq071+D7K5&dY|aCXHR+ zi(XB9r@p7Xdy=Q0&@vOutE3fiL~1<=_}Loax%7@^rP(MkiLZBpCBIRxDf`9br-#!g zNy-Z5*uV!5)TP)Jz<~-9GMb*~W8(5CrATRS1CsZ#sv}!eJ??R6Az9pMxIdF~QtN1M zr7)sx#)X&xq(SkS5|bEf+bL94CG`Hrd7!OH)BJK4=`Xq7yYVALp#M zKG9%1rIGHz7cT8AT7q~y!ewPQM{M9rODjIwH8&Cl@c|v{*~$r=sTDm5TIOD{2rilpxy3d7FxG)KY3*&)^PopWs0RrQzOd zNy;#_rwtU&W$hXw*o3WT0#c67eR;Re>-B5syCj9YN{H5ywoy&?sTCoyny1x1B|!O& zSb!0)C2U-onv()l&^5oFr4buZMH?XVDKTpa$_@QduVR*o#f`BS`)j!(BlUi|KYHmt zq6FQX_-47jul>E`UgHmT*!o|}2x zXneu=y{ossj~>l`I9AesVf2o=vRIL71BsaO7qk74qcg7ILTQgbo*$Rn3Vq5yLn?c( z-u6a7Z6$TjaG|@>Jya360*PJD&-|Tp$EI<<+O9hbEi9qtO-|f}Sd}Cqfl^!$vjQ`Q zFYg@r8i_$7N{#MXuBY7Nxthm9p7)EK73);{+vd8`d;4s}b->m|%s#1$c!jVaWp#bG zNzt;Kzihr34!>5o_Yo_uKn-)X?BD6{i$^n{91-)q$-ToLXwEw_JtsjV@8)~q&m zph2S!9NUn240TruyKTZYp#hWcUk{fd=Y}}rVQMrdh=D=AGA%NjyRy)U-IOf}17|&QN|5ST% z*WFoubi3QrQcmU3#i_Hq0kor2Q{xfYU7Ru*kLk&(A12KqlU%}H_Vgkw!c}Xe&o{sp zAgsa>1y8?WHh z6W3V6H+cZ*n!cT|sWTGc)9QGtyf?BMFXlGD%QCWPyEeAm;74vpylJup)jxWCQrGQ* zA>mzwMd!xN$x#jXmzUZ@fa)ObSe`b6#KE3n#u|Q<*-k+nRxSVf20HL(k||eiBSXkg zH!?&D^>yqG% zJEL9!29%v5J16_j>jl=5lBDu0F1w+u+Q`#6k-RsX&8*1wWlaVy#40yZc~Jf#hMbR=vcM_T zcF}CAi%e10f?oXmTHVwd{pgb>?O~8MMaM@vgkR(ac=7_U{TaNC8Yah zH(GVXauP1H}M!gYiwI=F{g$>hXoWZ~G{% z46%l1ZNWd{1jCf}?ugjX)_q}yfRoz=(*~5`#-~_4%C{daL-<4@f%WjoPL)S}Ll&|4prmwbfQu70dDyk_za6D}t ztWeD8-;)K?+RW^D|2(ToR`E*U&rOz$HiueWDur3rZ+pGR9&r0G*Ykg{3KP>qa~w_{ z_%s05Z#0XM5_`gls@$OV9!U7J^t(ANtTnDR+GHAL;S-V>KEuax$u@`Ez&v*Lq(WGx zG5m|WEMZ?ixs-UQA0pYQ0tla9FWJ;C~^j=jhG?$x5=hMv=h)G}{Ii^dx4esFmYDv>KD5P9LRu3Vveu7z+ z8qpk`j3tPT>3xAM@uMPReqQj45*fJJpTf{lR9?OE=2nP*t{g{M+EslUJ$ zb&{s|0YEuJ_~t4^KQs9eQ{G(p$jJjXO*lL}Pq91g)`O(mlyGGe_KnrlUccugSB{s4 z6ZXrdr)m~U6JTLVHF+WZV|%gY49}tr8a)&Fn8Px+Il`unpRa{c;@27mT+cxc!)`#+ z?{VD3M0o3(`~g`a%GZWiO$W3q2RUMfe4qcv)U;Rf?DVx*oza(G`^+LxPy|K@T91tg zXaMMZb1r?B1~+qS$HsC`pRy~~)lnUqFQw|E<>Si4NDdlgu#_Bq7Yp{Y$(W~FYxA(n zY1Fk?OkHr?An3EXfa{rz%UiIN&l1IA+*feJEwNI$H~x}!Z5_<&(8MRHSg$%@Ipr-@ z!(7SHE40avbviE)#IPBh*(^Ybe!CrIO7m958qFCi+WE+}j>;ZuY-}=LSJ)w?S|~*z z&w=GDDpQTmd2cXM1y#|Aq^dv=gOdf;1}Mbxo;2IqTDLz{uCVko%&2&CVI3+^;cc79 zc{<0363PTeZ??OW-p`s15eI0{8$ot+Q2eg2jq*o&`AYrz&`wRQ)lIm`YYC0I`5G;!O0IAw&IZ*dcuDNF7QEVEU)~Q z)Dxya+hAnYc)69;#hXH0Wv;Vn!d5FojnP>>%!6d}M~^lkhTUOLX8My2{8Q#asUv|E zpq59E#?s}`A-PK-kG}C79VR1dPO<0qrV1E!C?WM6S?3 z@Oz3Ck6f}W>+8$5_>XZV8I~(Xk{&shtSJKAY_LWXa|wf;Z`yNOb_AC%CX>+lzse}r zY*){Iw*4zpqxry}YuKW`VW^E&vrQ~f1m6SLUgL`!D|k`!_T@8R#l9RBYoNiG#i@}E zGR055fgQ=UQ4h+uuGvcI>b8+;VI@6b3f@JJt~T(QU8@Ii{hGu1A00ul1W$6ujPdxX z3t&m&*P0>GlBl%?kK=Q>vg+6BCi0ayO-i2Vg93?AxKhO4QE%}Ezkw6#U3e6a*PZQ` zDL*2#zM1E`dA({KFR8u}ADc2kCib^_K>BNPF!7x}y5vBcM_JkvS#io?O7A8FgqXu8 
zrOJmYm<0L~PyxHE!l8g*c;{f9VN~{Sl-DOZdmS#3?H8>F!5-;z=bDPO)u-D6JCLGq z;Evz*r#cr`{VA$E{!8o9;pJ+$2?m6*O$mpq+=R5{R2gedbC87;dn;FGyL`I9d!h#Z z!m%IodiGbr%e`#aDKmZ+AFz6Gou{E}nO?!nX*nfd01&kFfV+ULQA(tpZ!-J<>kyKD zKkcqKXU&+Rk}dm88u0TVqtvb^{3i7-@kPm?WLC#*!&q(_Mdt!))3wLRB3)St4g>p~ z=^L)rLCVTJqhoQ z^Ilm#jeV{P-Bv2Sh8rLqZa)Y0zGX5iZyUdbF}#Q_{V4F$G|-2+58`2Qv@-uSi5*sw zRdU~A2|qtN*>*Kn@N_fBsKl114Ppj)vy2kFn(BV$oRe`enswsg%l+u}F$O+%MPa@) za7wKe5#w*x^w7a@YaDRd1A&9-#tZ5=SlCI(zYQ-CW>L7ieqjm2fS9( zU>*s5YGSp&OoxVRDF%x@Zf+abAa0S(qAy3-SW19b_sUMb%<||JU;dfF-@qPhR0Zxx z8rhg!C#@nDho@hkFO~8vEcx`(a`*sZd~u~dobe%c;Z29nG2>u3M{HKh+UvS356&-m z3@K@kSR{X%M!Oo3kHsYWudgvZ;s75|k(^n1O)CceA44mV$2v0~JQS<9 z7-0zjgTAOWO8d9%@KZj@ouP|qRN$IK8-dp)%=p|^mhDjyYNL}!WLRp*2v7p#gu|a)HBH>EPXnn#86)OdBiJXaY%grdDCCKV>lai&6moJ~fKT9x zlTewD_14uadxOCV&JyVgBh83krajJg0CW4;j4e0Wz$WRoAY-~mg&=TT8jtC*GSTk4 zDBtLXVX4xHiBKJrFJAQ}{Bon2+kF=Ql3gmQ^By&4s zKJ8iuZ zsyAK{7C|@^=;M}rz`9bZph*hglG1J^wu5I7sSy*+{*}ot#@$HvhmP~G^>!5KGv(tG z|C6sV;`VDbRRp&Z&Yj6B(ByW=ZOi+&+i(RDD!xE>7z(z}VT)FRryf#X_U*%QRD2Z_ zVH#_!)B?zUPum=b;oc{Qbm7e|gjb05iL!O3=<(WjivHTTUQQPNqdg>b0l`OY3+jm~d6P!f4fLqAm{~e|2}WslKp?AWs~3jI}4L z`l8cYD!y|0I>`xIHS4C)w~R}?Zzy`yvue~Z=h0m}X&bL%4Ud-ACWkN-c_a7sz7sw6 zeznRTOODZZRZO*5Sw@K2fHC{4R`v&T4GG>K&PD8u8QAxI=pf9sgdQZ8spoTK1%+XN z6I``+vbSTbLma=5NlnmPh&EX!NpTN`yQTgS)o%#-9U@;3lJ-ozqNX6-q^x8-*j4HWa!vkA~wKRB= zt-4#4wZ_XWS+QXSmqH3)n=|zqwwohnPrW>c5v7jK#^Fi^O@4i?A#f4VzJ*Q=!WIub zPP0TcbtN|J=&snP(v31DVvjS(4QctUvvc&M)itbwv(iS#q`}zuOIcODK#Hhfs-W3; zN4MeacYrkh-La*S-dD?N;2u4wILV2y!DBL_X~B+e*Vp{eN)< z4b;OqN(FiAmR~S$dUBiJLYr)UC?sD>jCw&s5n$PXy@}uEx1F? z1;lJt6`+?1x-K&x)7WUx;|3bb(Mz%9*}}r++bdfr9o4Qy?lL}_VCu-L1i6Za{ey48 z#|QIeBcgyVPS0TgiD|Ri8g|aRE6LDQJu62!7vBH0o`M<(=b6n5v8R6a;!p2B1x*C6 zV^68#@Oo|Ep1U1HaFagW)@ks}WSomIy#7?Yi_a$QiEy1;ogR7~WK|b=RGI!4e--7l zEX1$+=@~z>@M34XDHdO&h8!7@l{yHwYni;{O%Uzkc22Wi`+UTVXSxG@%)mq8$W>SG zOI@V_!(~m1g?bsyGCBOlK>Mfn8%jlQ7FXRB^1w>w@|HLKOI+CZddmr?4~%&`swf?Z z4^+s`bNq)yN#)A1sywBD4%yEzs`o)<8sj>-{m3HhoM5$*}K9!3w5Y%X7X=%4JUQGi5 zr?O$X7_zCybaAxt9OH~*WEd~bGND!r|rf~IQsPicrxxO zB73~O!>Zxg?QnL7i#O|}e36yI$m}F{(vhL0Qous%iT7qn$xiEWt3`*sOAC1~8b0vQ z=kGMn&h;pFx~@|2D7%u6b$=SaOOFcIwm^(LATem3F07~tL9y(2U6xM!$dX`|tQRAq z**QX-z>I^hK<_;}Rc$TpU{4-~;iXV!9du#A%fhP-fWOI(@U9QkvskChJk)?@0;Uk? 
zwO||KvT_vv-NGa;FghVkJ+4C{voJ&ttgYRnVombQDLQon$CV+T+l>2hf;(hYF1qCj zm?g6-FzI_bR1JJ=J_pVl~E$zZrx-~13Ju6XgcIp(b85!9cu!%`XUJO5`xshz7r zfd)f8Im)AFShO!qxp56$pm90TtX`X;oAGTtbzGVl!^ly1`UO1&op*BtP7vDUPK{PD z^~u+eoZphs@DR7JFD2Tie`k{Z1l<^^>@BR zi(;1^D7z=;fQQ62Y^WKImT#Kg)oi7*tSZ$Tt?pVC+HbJg+qFDcrGA?b{&IvCdfsEN0U1Ko>;ZO4P5@t$u7+W5cGY|+)IDL-g zksi!I)VUb@g_i)y?9S3=gKs~V6Je8BmV6pe*IZR}iS@8-Zo#fcUvq<&vk~Jor-}ES z2`4p9D4&*;qB_4lIz`Rr{4w*Sosk`4**niWtsZxmDe1~p9_>f;i8uS(H7xHwEv!k- zuSvDJ=J56=9bM@??WAy*YKN`K)9_UZUtG34XLfG2#KVvV0i)SjBtsA;u4s#$ zj&rU%vAe_Im!o&A?kH?IM4WbBU~&C`GYhMqxwWEjQ|jwWVW7jqkYt(c(q@!e(7LzL zN?AQZ9o@1YfNa>5+zZ2=d5(|LAII_>2WxJ~bZ%RPD0rT1#kTH@REc@M=^HkYN_g+$ zXAy$nRZUyFTuH6eTJl0=LnS%-?RR)lNM~l zkibQs1MknO>U&}AV?FQ@f~|}@FqZ&DXYMW2T7N~#VXvo__l-%<-<=k77=`CrJzM;qR#W=s5>VWBOQ( zrvTlJpa5iT>yTn)@;6*@SZ%I&0V=9`iLd|jLIGlLsw#oqou*SVw9cjmJA9l|iF2|I zafsE;kQ)nw_+6)PlnYp+@j52GqhQUhJeCtN8E z`%%f~1MwR?|1BN}?+=dc^O7AV<}Bu*1car{yUd?Wy#bj=Z10O*RLHN226#i?=HfDU zDVA$er|0;7%qkiW>4+rHOV0^SGHELQu+f=zEuIn->E(9A1T>tzPKHoR-4BK9l&hiB zOWo_}y-nQPgF0v*_uo>DD}GVQ*RDXXT|DEhB^GgRah6YkFu|D8@_WKZB0;5f zisGO1ZYBNjMt;&T3SbD2lJ^oRij+~zE8z3qO5@YuoesahCbIYbQmoaA$)m zY5t&sI2zYhv>vbZHIsO<8>B?~Hn93=m6n(vrmS@MC498l#Mg-$lhx=ihLGV_ydcr* z`H8Mr@Oon2pq;7PQ=v27ph|Oo8%})2I!;HKxl@Be9=&yTV6jxbPR_8BM>vVEuKAeQO4G zl~paY2uBb}F8xN~8ujZ}NL?DfMc73Hgu0N+e7}9V;I)RP>%0X+`!L}oCuM3c%jx5} zmgNPpNjcP*8yC0oQjFAgN>vsg&0QRFg|*M3=CeDyjG+6zgs~O1MX-NH1T4F>KT*Z? zeP*H|7p8s2+p;~#A-V92(p==m{FWzW>-=riY{%+~2w$54|F5%wE1R{xT4S_UmeRM! zEB$Nz)@DtWD1L7bq|S`x$TuWDW?<{xG1C}Gug3S5OkR+ezI%7^CGuA7oVOEqxv1^w zAw%qbh==P^^rdwt&x0lr_q3n{sDz*^%2w2T<>cE<_Fc6E%KWXV3m z@^PaxPh2KNwuXkJ@^p2GPvyCWBeqE+TWgnNbv6hgC$c_=k(W@9DAV2HqoLHQnw_4) ztlmxR;Il^O<_9T>!GqGHetYju>m;oD!w6jrbaT^uYI=cCq-dS1#WR zE<@(Qgv`yinM0oIGlfE#wv>%PU*X`BXtrd%nsbYOFVi+caalWT9$f(;aF6ISY?ARMuSu^2{G7r<*BqO-}-p?y;FD{+Vo~qEhofZ_e zREa_={C(#S>5?nn=bjtF9Yc7R3@I&H+AC}Vy?+?Liz{JZ5WKcx-j2UHMg= z@=UcI+}{^Y=55uqoP)L=ybwiJTJQbAmLr{uHGjJ^uGCfgFN~T~>KM8lTZ^0gGV#IW zq9(cjXyvT${J4R<2~i`ndcS!xW}u7%gBaR7W>|I#HO}HQBy$h-7LyCciQF>@*}Ryw z^|Y;(#aN+vU`N!}Ccztq$+`JUd8MOJIjY=??>$jT(deb*yr1Qh~s=Pb{tEFT0<`5ON9g(_!cg_#20~fs)9k z-e*lcy3qxpdXAD+#XNaZ;LD=BWezvC+_kFDcPV;7xJxZ+o9mnLgL||LENkcQ%=zr= z0|AS?*c4RFY3k^hpxfL=JZ6*Hn>bqun-2v<`C`9Z#!WSs`}$hcMnTj zovaIegAw2)W3ho?!?PC-mhl{fZQry)zUB#Gr%HBHev3BMkdYpk>=@yZr~PVMkis0F zt=rQo0Y;@Cnm|vIf`j!jtcBB@yz2lZ9xrL3+3r{KLH)w8+J&w)8b_Plw270i_FI2R zo%r*~=)QtG>T>b1RmEZO71Tl+$1h$*~j>0z((4Z82Ia*Bj*La;j-4Vi;I*n*S=Kj>0zfu40R}M zXW{Gy(vi98cztJ3FJpLc+2X~aq%X9+-89R#Ya*`C%*Moss@UyPv0t3cVmPm=Y$c7&JeVtE zS&)cZ`OeJDM3Fb;Bey(dUS_%ijqL~wWxICd41qtU+VJ^sY&+0AP@T019&UI!PYo1W z3wlJ(E{fzI@HksGH@*00tZzq_R;QkmZsM++sRj3)TEee6vR<)48ci{xtc6e(P^(~9 zbEPnFpci$BjY;UMnF-I<>-Tqf-WOqU#(2iBR9v67PQmpnxvbZqsulNM6kYvcxt#5y z<-1FC+zr* zJL_&ieqJ4-Y9|h-YHEr zsk#}0Hi93ASAG36^FfnA72gUvQ__BKJg~y40iM6adk#Bg*uIbN67 zE~{mwH&M|-)`ubY*m+8{x)7WdsV15v{Bzt8Dzw( zm%A`~-~~wzEegln99DQMW19*7;FP<*n_aJ~coRQfU7sRsH4~KkT)QFq17-8)1^KbW zu`}Wy|3rn8iE%diPoR`%V~>rn_FV(v)T0{TNs$aGO56P`7ULeRyhk)So=xW#Ul0y1 z#=22=D64^e^=JlJPzJ)E^_ffK1;R#byT;laRq9M!$g6ZJ@BC++SIvYX_-ulNZ$e9b zCNcT_$%~lKr^y?m^w*%#t$U!wyGvOaDKDNsKa{+MJI64K#x;b>s7+_NId&(ztw zzA@xRJfGWMW5_b^+=lF}^zu*JWc2vYXgEUrYTY1(UtRs*K|gM0hz-mrwjjb6W0a$Z z2b?cVfaC)Wb`=mDS8%pY$g?ynytR7G;mwSoMyE!iG5(HT}1^F6D79X-b@A>x^uID*@bYzjk)=@jK7?PoIEhzRu3_0(FqAN4_{E(4AKcGa1*4_3+@o(2aGuYS+D%MKa0;Cb(+k z8e{qcrZz?f&4+He;LqPh>gcU0gw#|!ZvDvFU=3PKZ5s-Y8WQa7ZvR{{Fed*7%Pu*7 zQF{azkR^}jE=S}kOF_SP#mxKLlKc@C{a?>)PaboiBL~r|h3!&` zF3MrnK1Ov+~Ny*LR)`J4&B^@{7HTfp7eH!Flt3-u8oSE_)57>oxrY3ljdrs373uGlSW_fe; zI|7$@#(P+|^im;k-1X~JXsKsTe0-^wBzB{S=?W=*Kci#aJuh~Hh 
z*0LD*PL`CWu6&mluROeau8>Uu_{U5#>V&J$wpYcNb zdyE)S=4VVkBuSU;^V5*(pmryz7oqdxw+g1=(>>2aE-8lh%eYmO z^;B#KR*zVAL_{M>0VzbL`fe|*D+z2thlNDR94(hoaJCVS?hdF##bAeYDC5D?y4Ai$wv*d1i zbEac`n1P?Q5hCZ2Rdjf}&9UeVYrt4~rOtp_!-I}SE7L)~;})r!oQr4v%)t!@XotGq z%!0}J)r*8_cr|6tAqwR|~NFu5u+?Bqv5lHi@cevMd&+o$kH!9AW-$c-)>YZC`5 zdf4WJOwvwRkM%Z~V%q~qBBl33UK?vmDxO=E=!`&n>L7H<+ciZ@F$nMBjm=*swtK6( z)FVIIt~F#@G;jIrauvxOQBWeb55I(V$_VJj`qdn|1#l)Jl~@G`=-dt*NdEu&~w{t%5v%v?9MQ3b(q z?gWMPwxYIy(%g$=ZU$G+`z|j#KVeZOdY^ql8qJ9sGw>Kk{nQ8Ls<;%Kq@?tsEqlbR zO-5h+<-NQrnU3iQ@o55Tz}3@e*SRBuP0c53|MrXkGrs^#arT>C<#NI7w06&Ahgqhz zjhs90P?@FU@)g04dVD9 gSGeOWE&3d`;;N;pN!-d_C0k!cUguucYsY9`s9-(lY1R0V*ixP#0zSkG{wNK_*YIgoT*LL# zCIr!TC5mNp<$6?Q*dgKjR;#mE1TfKQIxM%DbOUG;73ou5nx;bslqe z_MfW{I=DUJetx9E`Rs?;Gv1jiZmS24ie}Q!et2!@+vb!6lB8V162xJ)5|$JKD-aHY zcLelGzVb_(whU#$ok#A7IV@TYZ}@)!;JVZHtGgU^s@l$|RQsMeDtzP#+-2-bn_}pr zGnX@k1+CDCJi1BPa5lBnl7H> zeHJ&BS)gy{@~@GCRSN2E=GCEcB$j;S>vVS@dWn!3)T8cO-AzIMAVi#ukn=4#|D zCjc=wi-q3DmyYfCS?)-%<(dgn9$WR9Za#7|x^i}LGxFY0T>(p=T>2l*LuVDje(C71 z)#tVyTV_1n!UxKi&o0@>_u4Q#`s$d7VI82^?{)aK?@V+dw%^-3Y?>uVTg0Iw0~x|5 zi=|Ccs?6tD1F-Ci^P=6ptp{DkZMqFD1{EtE#B~*Z<6~F1DN4fpa()K1-lbu6A7y~t zd?u6|B2?9*JM)gIO;e=x{@R7}ZiPoZ!+(~bB;CB=mn*ik^i`}Ca-$_Q>gp zlyFAb*GDWs$8e;7&YEGSNRh|em6ySdN1W!tBhqFsz35%+dw11!8=);-m+!rf03_nJ4+o@st03&Dcf#B2 z9LKy{^wOGqYjZO`Uh>9fbi6VWt*A*0FDPZ-d0Wc$G1d`#7+Bu-84I5@7F*cj7Yq8O zxuNhOm;Og_6ev6w(nr_ZK)RY3HMC9b+M}!i21B3gI8QvjkG~Wuua@Pi}`*{F; zM0o4L9T?UCT1HFmLFKp9_?7KsY$Pd&q_OS$=R*ZiJFA|(piMkPO@}AJyhgPyyR5~< zvFGwCo@PrNLQ#mFYwV*f9O#U$G95!txfUevJOn zwt?dE$#k#^n3`2g4?5wRLBA2)k{C5erKFz-ZS{rR(hmALv@d@A>g&elgN<*2# zif0!OL{=G!6?%>gME#rd`MfS4RNQ+L0^zqhRak#-fQpo1I2*b;-9FsB*%Fg;)$r2q zP!wR?guTAC?_0FwF<<c%i)oS$$cS<}p(nSha}_RG~gN9dm?=6E3#azLNG+5)Loo z`P^zbWJ$?+3lAwe>PDv;~M7E00#?M~sSQBy=>%0rP>S>u;%{tkEJe z&TE6ViZHJ#vF+!9$fu+}chbvKvB2^9ps_9Vy{bqVQmY(09w&|ln4pMX>IYaN)ic#@ z0)Ng!HKt93^;5Z?4t=Q2`vzX#9hkES+F51o6=RG@1l*)}Gxss0ti^3;nABw7A8;0K zL6b@skB7_Zz9dJaXmjXqAjHqLZ3jhf;m60@CzmWDmfc?cbtTZT(%mTFk%mP?550b% zXln!|Z=!9tn0hdm$u z8C(16WXtX``S6E`0X)3^92Ga1Da3kLYH5yCgpWJ-T@V9p^QgAI-w4`>c<%^Hi~ZxZ z8i%MJY`5L{bDq@DwQ`GK$(`-i{UqAWrbg+a1+^g81~e9OYD3JlYf~-0k=Fog&$N@kZp=53Mko5d9`aoIdup1q<&RVYE6 z1O437v=aZE_bUgdC&s~H5AUg_RAL2N^g@V&Y4od&>0;#pr8^8;oXNA(N!X8h;oAz} zhU9vU+FmA9&JFutmt)CGZ*zV0)0U;ZDF*QJUk8K?Pc<_^h;ap~jy zVf$;&FO_KLEp$Dld5m!gvJIaV)7d(sNU4fHpFth{o#3^+k5Bdq4XQ@Rsa>>PdO1?Y z#~li4x0*T<>pb>jnGTE3cm;fE2(TNR3Pgwv5RYgX1kg#Pq4;~QhR(H+%*k23h-Cn8QvXCblQbSGi!SY^ zbxCNyYQ!G82?kL@ir`lYa%)&V^w8Y7d$o_?>&HD5A(| zH_hn+d0c5OQsLO)=C23IA|Tgpj)Zr=0Xk66%>meQ$Ls(qi>jGB-|NCM*2(w5Dp~R# zKHfwSvG6ufGAX(!3G0=29RPOzEzXKJg}VlbTo<9VzQ2>UpU(d#Z5ulUxwiKdrtgk* ze08-gcQ=ws@N3~ORY)?6mZ_RtYl08Xy+%#6-*fjFC{^xr(+QMWI{0vyV43QZ+d^CC z-jjI!)c3Jo>#2{6vk}S7*9lm|SnUHVe&tQ-K&Gct!@%jL&vdmizwzUc3tbk10hHhL zL?2W-Oif?_StB05e+TlrPTBOftosr`dPcVziW4ja?@T4Ky9Pn zP)E*nAE?`0|I8WIq*B}=uLCsHg}KjdUnSDHkebvOFh#Ai17q}YOoFzeD^-HrD+8b` zM|6d`qWE4{xhL#Gl*@f{`#uv=LrV+*MeXvw4Xr=Gk>nMZ%We&A(i)69zU|JAJGJo< zXBV#>9lht{(m!YFG#G{zkS>N`O6$2UbcM?VMqO8#tyUFunZ1`{^TkbmcG|O8H$>;1 z?bEJIj{&rY(P`zA3&Z36Oh7URr`PGiwwPg;STt0!V>wP5)}wCw98UhgDWHTWdy5Pt zJ~9OopmrS`Ua@qszW2Ue03y+CI!ig{#ku(GOXp5G&n`+O*m7B6se?n$7@YXMa^c~?#Y;%{)t;Nb^TDFxDb;BXlTl};KHhqHf5-YAPhbZC)$7Fp1C z=QjRYK;H84jdy)P+tnuiyw0RSAL;@9^YmnlaYZOuy-!K0M!Y+?LisB5pg)p@Bl2U( z5>MOIX%pR#k-doRVhdA(W9r_djhl8I$RoHJ4wqn0fIB)*ECtKAcx!LSWB@N43A7+D z*T&*tTBTVJ;u8SQHN^I&-~js7q>VFh6^TpUnc$IaRoV)AY(Sux++X^WY(QLQEqdVB z|8XYVVB>)g=Hz^r$8@8?MaExl!p-`-*Tgp3X|pGspFG?r62~S?`r?g}Kiz z>W*snQGGLgB#3DlS{ydAN&O{i()m%Nqpn$Mj}2PnYUZncU`R(gTy%CaA_Ek7Hdx_o~MLBE3|ILY_ej^5?!pk 
z=kbGxAy<~82{DUm;kCf`lkZM0bWz3xk7S*yR)XW>CdS+>-v7Pv!Ak)Z6>GUxaoaQA zWZRN^$eO!FUku#pft3CYN>Rs^8XAc2ng~V6hiy5GH*K<8JgezMV&dW&v`N!h*@k8~ zln6-Kv^&_7@z>jF@n)e0m}SC%ye;=6)36Ie%elpx?BHg0DLzkF_I})fP1E;GYQ%n0 zzJs$5$(*-?E-wS6kFm_$+%w+IXBi5E!^h1>5FEA@YZQi+uf-&-;p{&A{QDm90)y5W=4^j9 zsIbK|*-(!vMUhMtZs%&jZy^CuCtNWkwflhZ?S`WdlyCodb4FB0kV|9)h62D^q8!_q zP?rjVFen?APWv8!)w3v#Wt|T%#n$RvbvZQ>s78#{wTV4T*Uc2`WJM?WT88LnwPd@t z$%C1N{Z}V|MZUJNcoa&q1h+2z87t1jrRSypCRp)1NAJ_cH@rT!Zx(A>JwdY7$~Vqe zq0e^(Ll$pk%1-oo*`aprQ(Z^PXNG)=)22=uhb%jW`7@5g2$}i&KvCou>To-_e5EHM z4}Xo+kONVrlSQ52B)2xpMUw<+C4C)#??}bs9DBi}o{Y+K%8WVO@{U5j-2k=)s$+S5 zwh7;Jj>cGb08myL85_h77#cr5M5VHI-t-}c-O0`^;Z-69*X23fK?q*-TWdSDdG#B3WB zmF=MI&2isCW0TEZ-%(}O)_AW9-NAEOcZm}FylQGrueX4DIeei&Sj>Kxn66Vi3_6Vo ztGj)G=`nyyh=(rgxkT79Zz=dnop3dFD0YWfC^(Gd+f;>8oki2}(kwkFsWaXq4K5_9 z#0zHDYpp`_kxdbC%2$OY%RNTtNPTT)wktvy#gA9dc*9QW49emUyDci_KmYR7k=hD8 zYu_!=tb6Mob3`Z1OeMR$ozlYQP;xZu7TRU6HrtGM22O&IN)6PJGuxBKbid~6NJ)0Q zG7(JEa#CtD3NS`Kw8QO$ZZ9Oza`m@J!}RCwdzYo6ClB!vUB?s_q#{e3^61`*Ww*kN zRN$}#>e}qv<9McWCk1lBCm8fCQPBt0F}VrrPDglIikYC8TCCcwGb-2$tjzj>9RA1J z!#Pfh_u=R4$tq|)U8aKv#?By`GVBeu3SrP_QT8FJzFOo>e^!^vnUxhtjDc279QkF1 zm{y1g57>yp;a(@@``=lwkQ!wfG4|c|BSK!=#TyfQh~q3na#7UI3{bTbeDMBnJW!Vi zfH9i^{Ow{uYGY~E&)3Oj{i4Em3Wq70d%qLsB`BAg=0?v$LqBvvlrANq<=_eYso$p# zF;|hxM^EGeLLEsQeE>i;ha0!gyA91(YexVTkbZy|GMWxte%%)9#~3@7T+9h+o#Lls zbzAEJcpm`xwx+6$w@U(H4tXqh>eN!_4(`}hVDQx9Gn@qJx^ZuiP@Xqdtxv-QeOZh+R4&=@JLE znSh=(gr_|%x0D@fK^1f@x#}Gr0{TKDeDp4u=W zKIJ$xDvLrmwVQ)fffjOnRU|(8ia0?zN%%B$o#xVz)RpPkY1i)mj}Y`pd##KDa3-q0 zqhH!Xg9r2B5y>odUjX`1GNC=w-EtmNl^WMF?%z!d$Yciobdzd==${A=;5V6xjaOO5 z;cVAm1B}RH1dsUEHGh->X7b|VD0S_@m;Z;o?~ZD!+xArv6%a%eklsN+Kzc9IJ4%%< z(mRA2N>EWmk>0y>=_PasQA9ckMM|hrLyz{{F_uV1(?fv9sq| zYpywe^Ec;QuY`|2D)k90g9V+8a&eEOTmwlvoof_-3-}@h0{^l`=({um6h3!mi}t)H zFYV{wG(uE|!pgO_TT~=!-5q5QfR96=*yUscClD4W|E&4J@(bv1+q^EP`EgX&c zv+oM98L*g^I?DER^Z6=9X&-=d-LLGKq*}DX=gj2BMe+~nhAV=UDpA8eX0?}ZaP%u( zXu`6*PlZ}`LJ=NLc;hR4_YB*30bRmhYV`hEnKO;k=49csWI&5VYo81SiTD-PpCE(= z)(cTH(>wrki}>1rb4^F)pd_8vNbgl&k&CB-9i#pSrpCwC_r>%}Zz^8HG-1gHex4G` zs>c`dbhJ7Cgc&nDrBd^uIyIdO_L?>)UhhRUfqH4&wW9cd*`TpAojAwHGpZ-n!rk1L8$J`3Wv!2tu4DANHQqD~& zX`1!PLe(kpMxs!Znc3qbmGtvt^Y_GrGm5v3b0t7D!yDjQEK4Qi43izQ)G{M!JCCF( zAD(46Zl1b-qCrE*0EkjULnhvBPqvD}U~_uU#$!uD7q-e~WxRHc2WKWR zkm))AtU}YwKV@0UG5P|~$@2z{okRfg^*h6aEM19@9iV$**ruv&JjRqTpdwlCi?#&W zUL;xtpS1-VZ(J=7w8=h!_X+|GmAuUd9)~voHcHaG*!eE;VSZH?K!|f5-L(QAe_`J} zNONC~rJ&1zc&~j|rkjA;U zQWj*S_R9fbWZ{rnm{Wlp!?7r^V6GB>`;yY@Y&E?w|z*uh>oCZhh9olYVVC>!vC9RG>wYRFB;vZxe* zMokRogyUzIfNr~jNX@Jvj#a?iCYo=U^DW)q9O6M?t)e~b0ca-QGvo-2zcyLf0F_zw zlD+JCxUoNkoNEa@S0u6bt{NPg!7h~sT9Qn?-i7wh9Ed~1A|npfY%awb)pH#Qz&{P? 
zou5eR^Wtg+wUgg7yxBhVL*{+G+5vNo-G>*{dzb68#dU#YAOl>E64vhzf^0Gah8HGx z5kf~VBt6_bp-CWx`H1QG5nC}Jg4T5*#cDaWk?dmn{-{v}QR?0F09NN$W)|0=sL#H}%!rVY8FaCQ+H1vP#vpvr z^_mipu^k_`O54QRdPxWnFfZTiOV#6~A@9TY3Ow0XIogS}SYi-ZRXSR#npeE)fue8z zu`gjE5p*1G;AS2zyD73U~f`Yc{Bmd&HTAedW-c5(Bm-kh?Dl`cyVqJz#RN z?Iyi+#fwF|mIqJ~$Pkw>h09ndWt!ucl3`7BKKpy0N$$#g7y+s?dE;?REA-uCSWS9g@FugBiW8-zb zD?L!oJO0(W@Zz%MqRPATYih;c0MNCr?87Vr-9JJJpR9e@YW7QvE}m0S_PLimm0Fx( zz{j;}3LS3v;@E60jkj!qYAs9AdJMcFIo{=lk9m7CPcdyDx1-M`AZB)DL^Dzb#~EJv z>an?pE4CBX53qT)5`L=_{NSxppjOE&t^Jr*CVg@QVu$4E2VutTFj0#{Hh35U?!G;3 zPMA%X#i?(kEtj!J@$k1IPX8!u#j5$@MdRxy>^%1YTA#O8nj3szKReFQDNM%ED{yM` zdsvldulWiRAS7A?sKxe2Ql8XLl=Zc>4mKO3Xr)wX`z+yRz{vw4XZ$3fjeV3dd)}g6 zmv;^A)1-gMJlP}*T$}F|0_zb5Tdz{_Q!7s3t$71u4C4r> zG0^C@>q{Hu;@MahcRyzapQb(z=e6=xprL zis^=No-oY|Y}Cd6anPywBLQwhH_>D{%T*fB3LdyiN0W7Q>g%GvT z^#;lGBcN_K=n&QPixF&hqXv()mAAVwbXk{+4~JUd%hwn$Dpv}?@Aw9=V~>P~0Cw#9 z1^TAvFTbRuupDc+opn|(%wPUO4Az)KDPAl6w7IO+2R$?9u_U`4A!gbl#fcAMy9oRc zq}7TPJQb|K`CK=f#PR(zmeX$YC`m%PCS$FbloXxF#@l6kvb;WSf=Z_aR=+f4wEtU+ zAI9@lp<%fNON18tEVB}!T4FD`mU6Y%(jJ?J?P!UxZpn9Yc5WwdPnvK9l+FTg`v;)c z*dyp^x`qWLE-~Hi(nuF|k0n`=FRb6_P1LGRI3`*tjPkP&d&Q1T-Hjb}Z{}CZeJVAR z>SC`+$MJ@cT(dsN@6wN+o8cQaZ5SELn*R0dYv_$?59PYbdfEhwJrPq>z^0FrxH^x< znj5vE{W*hyMjn})ejnj(|8~T&X|og)zSkRsc>Wlg0)vbhho26oEmP^e($)ws`Yv>j zsfBZg2DLyOK4o>KY8QUiTK9Bdw)tzq+`fs9eSD<-$DZCi*DrVj1(F{5Fz@M?cQiJe@6E?*feenAQ=?_y6%8IB0b~fy7TsjE+x>1ZlH@QV z7ceJpMi_*Z$0nNIGKui-zq;M3&ryAu&m`AW!)Izv#kAffv1yM&eFZ9@kx0Y!{+z_P z!3`F)i$ae<3QBskU9+*(4cxo@Nf_j>;2EGN)+Dw*+tAU=?6Wy%=YF-&!i$00sAbIv z48g0P?|kqJxx2q7V1ZFlU!=HbMV`^IC8~^Fn%HKi$w(4HcpC2Ry|w7G3Rx;3qo(XT zcfid`>jt))j$Lsw3wqaTSk#6loW_BoL(u36hc-2tmLz+d$aUjftQ` zPj+1}kTH+Z647oATst&>)x2FtX2vv$*VJktkCO6$Ij;rqKzz&qj{B*EpK%|-qDA~d zvUy2f<_d|+P;6`}DuZf~)&bQFEi)r;78~r!plzqS7>|WG7^LwThVYK`sYzwiRGu2{`v)y9@cbyrpVXME_w$F?u;6s7d(a18lVM7>8UvO7otM~rulcxDj zld+lu2ESBK`+O>oN_6BCh2;`mwwd^^Vb(jfcpBc1OtufVTefeSuJEOYkn`A9e2JM{ zpM5eVajnW;{dE!Ge)5iU|HOcv8rHZ(2p`V7ho#?5fTq>P>8)9I#h) zzi8aI2$AZ$*z2|$y7bPAC2C{Q*iNX@=Q@GI=Z0&LqESP)Iri~3ic{jEy~4W=B4>RC z!@h_>f%T|s!>uk&Ro^;k006$ldG~7Sxw|sU0GVi`A)Pp`b|mAVYzk_sPC^vyzS^B| zy4K1V_VN(pmXH&n&?Qni>+{Z_e%zU#A?TuITEwYbxcDVOSi0S-h0P=rxeUbKnTqMs z+AT=6eFJ?SLHN`!0K&0Dba#V6rF=Xqb6FF0?gH*f5?`C(cbVsx@w6}BKb?JVTU{L# z5>Sq|*^Hh>OYGYLC2}ma4+D2o2<~lR7Z^_#B5ZeeHIVPlDZCmKF982aa2EGu5ZTuV z8=jYcLGM|+qTeTA$K-%B62lP8Y0-RQT_Iuz1G~|Ie*jsS^lM!1PgDg3$)Eq z&g0KJB}rQa-fd@A(B=^C_18NGm_&A?WRmnQ$b!X~EKDAqvgJ4=)ww`5u$xNxTY1D^ z*{C;OUApYK_LJ8}Q7*$5TUQ{Nzx+MBz5bc+UL0tOJm(JYibTxawY>rdb}`xV;N>%c zMvK)eS<)B430`FTj^4j9(Pi?kx^xJ5N26RoC)N+?6!!XHBcxVzAmID1wCtIrHXj+e z2R<~zPCG%g4MX3Pu%_ue+x(UHxbPi+2>Mb~51Bh@wT=ehtemtEouCA9Mp^ zfC*&DmUgm{Q-adktxjks{Yod>%S}IC?Ue(({pYpSxW08i_2qnkXusYrJW=kO8CZ6( zDjT2+Ln$2fDN&Lqgo*t7H&O^k2X&e>xUk=no|s6LeS&8~rg2eUyN2F)SxCIx z8^@-2VOWGrS-u*y?hS-)0BtLZt1;eoJdUYAIGHe)J0;RAY3?<^fSkxZde?+NOgRGu z?I+ZnTlviA>fs2CR{zmUJEIYzzO_{9={IEQB z%r7o5C*Vs<;ZBuo7qyfj1~Ij~5E0Q^?tbbr>jb(2VCH0-N61Jh&HDv zPr~vS9RLTQJFhv906hgf>6s(8UQUBs)jJfNKd~DUoR-N`tzQ!XOSexlp zHoY+PRxz}Tmw#>^RbyKhWiT6ff!EZeEbQDm6aAgu9+Ad^gp(59`yL#u0qy6me8AK- zpwz`JJ9dD+QFsIV1K6>{wZjIdofxw~SI3jCvr8in zpc-vYd8uy{Y8CON9985Sz7nC(8K}wc()1l!OxHC^x*K_L;bFi$v7Zds?Zo=yn`OSG zdabXg^>(>9y$?|}E*li!bEp#N^VO!wUwju(`ljn)!O5tdT#gQoN)MiJ(qr}kXy3M9v%VhomFbp4 z88|sd`J}t3lDl6546xqo47^no#iARlZRlYQzNBe%w*8hQ1@D=@fGvVGl4fShxen0a z*8tiNI+h-iU+!fc=jMT}=P8>*T%=?qQp*7yuv8G@r64<5!RJr?t01`{*54L>5${#e zwn4$Gx~Xax*Zk70NLEW8(%f^9`c|KZ)P@gnV~8Y!Lu` z1C_#?;cH$aJRrTHh~6i0u-?~JdzJPHV&wl@+dPv%Y?8oX(Rwf#>7^_*60)0J#5A}vF-J3 z_W|w?z+_RN7Is^?iuL#T42z7VVryNA_L=^b{re)L)?%($!Fh-%IF^SVxi7IpjZ(oj 
zUE_1)Oo(`Yw_Zg&dIuu2h3}Xo&%;kc_0&;2CKWasUw4fF8{O$@ zujFY@`#hTsE!WrAlAM}M{ICH74ZHqFS;Y`up_JoqZAkV$^=Ewc2kY+nd0^2nr zEA!cL=NvZGr?ZrY%T$*$ znK#h$VZ5?4LP80y&b6u-N{g|&q-Arw1Hd#NCjwV#nW{d+fCB}*R3VzmY~S{)?bCi2 zEZQ-d01UZj*mZ1i>WJa3|g$|G4T-)Me z%~{{zH-kz@W3(Q#pbkg-2sb05ltH56%*S@XQ<<+POWIT&`fyg^<4NGia->1d=KO(r zLIM>r<|u_bScOjXk>q%*#8aVk6LnYQqBN^t{iaoXU71Ti7~11E(_1(hnN}Ihu{Zu! zpp4hSSl!8tf>0d@&T3OitU_If9aoTuT|FS8Zuj$E2KI_H{EmJJ}shgulqHEdK1jY5_Dh3`m!W zju8tP7b7G^!5^P9|3Mv0uiSh~r1=zTvJFr;2S>J#&rr|mFJh5u3t&b-`zt-DY7%iL0x8l-O*WY zX0>@%w40TSk=IbCvQ6;MeU@&N$bG!BK#$qUrG3c8QWtq|Lh_THx?|U{ z2RlJBTegi12d}m!cJ6$wNthTp*u#AX8v1Fj{o_~LL1@JsUMH3hQaN4|imCJ|wKFu# zH0t+tis&ayy@Bk0ATsuT_?V;MnGkUtt<;i%v7Nd=3m*G{dt0ZL!Sav_gQjp`WQCzD z17DlaeH0oSB~b@}y1tz%0}{nl;jJc8AsCCk2tPCB>qL&JL*#x1LAz1~TTP97ba4&Z z;`Y1ic3+n&v;D-8Bh-x$#E6xxaq`$E+l8cWdjM)J6`4S|7ZAYv96Zxzg{lOgleEj4 zf-g^8!I=9TlzxOrhkGZJ!sW`nUl{7e^Z1-RBknq+Ojr}z&pY1iKeH+a5_M+NHWJes z)X?wGPjjf;g=jL&WkxNufUGjxALTyzy7LKrn2A01-CyZDlSg!d$$iLBd&%^iY;)XA z=c$V=yOVmHgS|;qA5r&cWxaRSh8#e*O7)~;bKsU~*#t^I1Jh}1K3c9yHb}vK$bc9YR(V<&FVqTc2v;^Fu}ndd z6Sb~|4PekcZ|Zux*=E*HQiV&v`%ev{v27N6nc`7cM9*e%SEYe_nT>7a0pX;MBU^Jc z^|Qm+G;^_+RnV4vuD8U@>+WQ+`VOs@$^1FHE0h_*^_NAxx>i!0VL?-zFDf!sPvm& zlqoLW*r?Z07>ar>Hc?#qvX)@CEvBsl#^wAJxs$+~qR>l;aLiOF3AA0sGNC@KJ$$}a z!>2_x$hVx184JtQ8*&*-%WP{u7(cOauKh;|lC^X2Zi1ifX$!QyO_p3Pq;%HHPulTu zWfTw~PV1g~Iuzg}yNF`IUu*r~Ml@h5C)i;?iz^`?96di(HzaVvh$~o!*2|tw(KcwE z`9|&o8Tu~AuI@Dz&X-Ka)eVNH87BF@2b-j$Fg+!7YH;%{$0JK~<<3!f7i3*(y=}kv z&7RHX_`Il~LsAn*seZgJrLgYuuL2&flW%zM6$w{L&*+TZ zmcHHUg?2+VxWkwF6BT%cs73|Sb@3omJ-a}{Rq*K!ZBLSLDeXv{RpD!Zf|tSjhKStz z)%qEbBDSKZ0mGAtZ@N^R`UterAG?riNh91T`0T+q$`pdo>xc30$#vVF=1}I>=~vA< zR@j2`z_LED>L3}tjY>Q3A+qeZngpgiZ-cC|q_Qpt$My}DIZV}KYI|AGO^gj0ezQ%n zP}aaK`m7#9vc(&)?w$0WyQ#947D&w`M;=4jIcnOX<+=UfQO!`3-KL8^g|cKB>sRy} z(-7y9h<8UA>CrbT{5v*RiIj$I^p4_|;Rb~Z20Sc<_9=z_s zOhe|wE(>w5rSSD*Vcq$UO*v}O!cny1&*|`%k=Z&mcG7j~Z6C1&pvTBHO|m- z(DFx9Un+cLZ{Kk&$`DaR@4ht&Gd(}X>GY)WM4V{pewIM&tUEJzDP8M`4Bj(v-Wd}~ zsoAGaDSCNSdwi87b#XkTpSkv`smWGec1T=$7dt*(kuY341SqIcFgH+l*6;MHJgR#o zI3(OIq%Zd50cQZBDQ-pu?mf?;`LI2j{l+7Wcg`YBXX0L|!yk*VChoa*ry~haENU}- zFO5;mT$bnFmD8yOzW5&fkkZc6nPOA2z9ajGSK|V5=1NT)Tm|9T7ZkV|a0RMJ(JP8G z_E)?>${5Q+esdR`n(ue~oqibOiKCC~zp-9t@CXPrU~08O&Y4a>8LRK5_A*v)NBh!C zF;Zs)_tWZ?YQ#d3v6_ywcNQocrCCBmOK|9ESS^RvB_w2!gK zl?yn&E@3$Kq8tQjfD9Ez>IV0J`Xn&~0~xHDvK-J*<(e=G{^~x;@#qH}WU<0JSkT9X zVkjWuCi<6VN#VLc;Pd{M6rdrc;(o>4*Fsi1AuYjBN9Br(=h(^&|Ir6-D$&YD7TSWJ z9b)eCG<^=~@fzVv*CJRJG3=>pLmnkikGgw{JcS+u4X6D}Z8D<1YrZylZVa`{B8+^N zqc?A!8@+SmCsnjc^f>biIj3LAdxc)0LL72B-(%1wA)?ThKQ&*+kYdE)w5Jtdn*(3D zNZb=Bc3mtMio^{S@4F2FJ=y+{xcvjpEhje}as;#OBb8$ey(+eo_GqwdC7+<|{L~a& zYkS=u_lAfv0W_YJq)=EpR5CjEv>>P_GOtx&nvs2 ztE8$=04dycO!-GpIOfZhjO3nY9)@rKG{=lDK65excjo-bKBNRdnDv)9EJAVTgDo95 z9EyQbDMv&^5HCJ)AmRnIpQig&lwmp#Pgb+)@sBxXiF-wtvUkKTgMq@w`S*df@?J>u zACJ`i>@uRy=Al^Wp9!?2azE~pGBR%8^^ad=zH;NLsazoJjW{TVla9i?+-pEdjnwA|%3`F^$CAla-5)Y-Tce^kRTc5#1W z49AGh3XB4{Vkb*JuZ3A+w66qpju0e$V}9|%0eox`Y$4QzccB`~{AOur<<`en|B%3E zwc9^h`_-)M3;vW109?Cj0?17cF75qi-{B9l_2=4y|Geo7>ot6LYX@2|cC$KIv zEIC*H(x4l)fGl1@0#|AO6juB*JK_I%!WWhRTj@-lpW&|zlKTkAf@ptA0sY7A^xMz> z|H=Q15D1$ay+8R&gRbxZi~Qppy~Mxnp?}#@fByG}(Tf*iaNYjDG>Fszki`|;;h^}R z)WpAgE;z89P7Q5r3)s-Z42NBck%|-d`H@_tyHug8O@G{Vt5Z zx7Hu@_TO9UcVYZ}YW+c%|9xuxCXByNtv^~Be;b6q*^GZ1gnu*=|27DJbI1Pnwf^9L z{q1Z0E{wl@tv`6lf5RBR3*&DX;}00k-!R7S!uT7;_}z0S{Ts&kT^RolBlwTL!SBBGpWeN22N1>}J^nxUPWb&){BPgM z#r#5BcUu(Zet+iw#k(M)4Bz8!0`{9x^s`h8F_-Xn)KII%+B?DM|AZ zAHFAbp{?WR<8w464V?|KY*VFtpNtee}-@|I5Fm>{sqCw7-rB zReNz&^{>Bjxh@lWJ~=@AZ2b=g14G&sU&x!Kcz8Y}h`IFFUvm0va!kA>vs77>e|<1; 
zKe?0}kHd-X@RG1!CS&~Tukezq_dAqECBN=5|MkJZ{chX=glElAr-jpRP>RZS;&d89#+q_Ey71!gnubLG_ zbHpIq_i1-J%ru-^uL6`m+b)k!qzbi*#IMv*IKg83kX|c&D1b}6Ru6_Zb$p;=(Jz85 zEIFp0^ttbz?IHWRTus)38J&E#w>fT|pPPcvy}ae7!UUsc$aTh$3+#Ixp?4HKCi&*f zR4UZH_r-^r57KQ-6}bY1|J=4IzfCw@+Np z<)sh5G2<7QrN$v3&gYYiA;oA$t5?UvH7L^n>9#!SY$E&X3`7`_5L1mrnXpDz!amGv zpjmcriSWkq=PzGD=k082u~Nl0$RwrS1m21y^NUB@BI0xZ_E1n%yOORmJE(ilLX;NH z>uIeR|D1|UHP(A)c{Hep%{(_H)^`?xO7=$s>du9w?0f%TBJ^ib5)cIUkB z^sZqYH@nh^N>-VO*M_*GfNS#9avf*7Mc7!+c0DgwYm+C&-I3a(;>~C<#zGkCW#Z*$ zp3t+Qy>TR6Y8(5?aZ!9Kceq`iO#}iv&6g?AP83jk_kkdHcyRmH0KGaFL zz0j{D>^%82O*F8pH8QTUWMp5tO56ti2JOR&35U%4oITnGix>5=6px`&S7o1@UUmqc-kM6 z1^3>Z9B-<m_&DOen9dXx#E0)S5PN_ma zQ>3-~EjRAy9SpWfOAyK+3;TooZxwGHGz$-~XIJKRYQ`wMjb7*XpWX600Ut^kSFiI` z!xe7P$*|{jemD{@YKVk2``qwpe)bdsG(+~4mtl~eJ&A%w z4>RojW_omRZoPp{D*T3bp)Br<1&f1DDrOJWPHvKIH4XZyNct{UEbW@!S!#v28mN{m zX_DFC;w?Ov5AoDSyxzz}maLzxeA*Kd4j3g^6YDzo(C?6Dxqe?wfNN~&plCTXHSnNV zD@ZW9)tJ>lUrH+*eU%Ck?lI%EbUVNO(F+A89+G9zkZ6m}Q#WCdi#D z7xgZ2&aFA-mE66)w(>R=YTH+z`G{9GSDu#r%xl>M)pGH%)vWD!mKlJ^FWA7++_Ir25@Gm` z^I6I^@eaL#PCdW&1YvPq91-qW?@#ga>-83An;V{i$k`ZV!bBMLS-_*7M6P|c4|f+{ z!Gxo2@FrC`9FHQhXolIcDleRrmYO0p?Vc!ye^sL5ByrV)-&u*&vxuc9+Ol$#R$#fV!SI6$s1>l5zs89pWJ@C$x-QF`w9 zt)S0nX{lJSnRr2ljVM>ZPL%^w@31)rdE)+9R-SC3ztMi{0M6UNV;W`Y@kLuix$ zBzB_AEH6|R1D+rkl|1{}e;@|(BlMN_>>EG5^rv%XmKm4EyJx(0|yoB5rb4rN68!8 zFIT{=S$!xk$|WGPB8~2zg~6Jc>Rd~}R@hQ+s+K_U9{uhIKK(=CcQol_f_tQ&y z>bgxgau(|q#I%Az>)8D=LeIudUb?A;VHkysBwfqCk4AjZu)G;?${k~|8bxln19a{4 zljMB2t@TG0BR4{h&4_n4nE0PJhCo6$KN_akKP*12Xh-j7oVLjkt!pG#s1^{~vVwO% zi;SH5ePKG|72^?gpB?6Lf!ZPN-Cnc(*v4UuVsR=Sv9B+m=T%rD_8zUAN}(kre`*dZU%yH zjhzGq%p15;3tA>+1&Frf=F-;J?;pAq*Ghcp_c{;xeo-lHjWN*H7;+BfR=>&Ce{z9~ z?w{Lo3QXgDq6y{?9)v&PnFv(dF3D%Ar2_g21Q2?98fc6~0w&<$h$#LSM{&JRm&g(o z(L74T#Xdz9Gb2UDrgdA2&TuWKxdW$Q-^8DW>|V@jq#IzgfVKsUqcqwqQE(hLdMLIF zI_w~m4ES*YXTaMMFJ00sxw9}v4`WBW9rtxX4<*M*s7S}N1Rys#6FGCdZX(~f7P*y( zl=|6Bc0#XQ`-DgoRBO}sJOF92b%NOkw%{J7+1C+*UU3t;4?<7Vofe)3v{o|Aqi*#Z z(l&DiB4*;I*3a*1(pY+G+&`i}yR)8DT(`%b%v0pnZRk@@$|1<{jxCW~vUdEyU>t+v zNgu_vw|RaK?oN8(TXi(qpGW`5*l#m!Q^wDfcdo56JNk;_f?AkjwSHqb)p7uFvF&Fpj-Dj_#yWtvw!MEG&KC_*wZ$QGts40TNQL5&YdZoYNK;oN1ml)Ch+IuBn+HUWz_TM2Y3o+ z^lc2dvlNiLr@7Z^Zbl142bBMz=!8A|;8m&xoOe3dkoOa&&9BI95F=QO{SJ47z-Xs~ zz0yAiK+R_`T3wYTCuUvT@jOke5tIrdw6L;DtoXrYNZsRSKi^w1aG&n!N>}y?))eYx zRQ8>FiSCyqdll%<9c-hch?Ct<`gViYsdUY%VNVNLB_an+Tb#Yt0^GiWEIl6R`Oo@gHn9he?%grRxqb0^{7Bl%(;)7X{gd!L)`dms ztBnuTCmy9yan-zOoW;E*uqov#iL@xEHrVGs!(sZ0%uiRB>ltbjI7cAgUwyE+{Syy8 zaynD^DF2#ETfkPcHeGX}g)~vSK5pDeV{?M(Lggj8?KU`M^(Q}-uw83bF6dj0Zj0S0 z;|S9uM7QCgvg&iZw~!p8I;ZpcC1kn=Ux=V*+UE4uy0cq!SIvx_Fl#46+qgcv=847U znTl(b<3r`Du{Y}MD@&2}C=$ozuSaQM*O7PCci46O;{B@_Dz*}x_=qaz_9U1}f(lIF z%&=bQAmRR^NO?x^PZ$5JQq*_u8&hWfjVVyJ^fB)oUMj)v7f z%Ed(2w9&r_X3QAtV^mJ3lPzAfjlLRuGw@PYWnm=EL}^DB)Z{*c@+AQL#7mIJ$zz^~t+EpmNNkGY#Jz8G#Bj3I{se zO@5RKbj;Vo-r6q=huC4JVccJpqej~cf*(k6s0ySD9H`8!k3cH6JfoeOEn2__=eDM=+Z#E_D2ii98#wjRE|oV4at6F}xl<_7-uZOv$0cp5`->A^+iNB! 
z3{x^HfgukRIQYa;+QR|(xmVUb>>Iu!kX^D9J4Y4$8*!pYljK$L_fzF(PO}?pV z)Pf9)ZIy1+$N9@Kbh2lTl`e+zY4&UpH0Fh^iAG#p#zW93ZgBbbYu86vw?1%Z3s-A~ zaAdPa-8uGSvXrM~l{0H8{**9zor;9q%vl}yQiH|YX3%bRUr0--q;?W4jZ#YtSgLb+5k4maVE)UhNIFo3nM+2}$SOg5FgiQHPY8F{C6=^ve z>kd+kCNzz&z;F^ZR#srRnPFh!`118z7qNcq74hPxI!&(8lzZUR9ScXQhau4vdsRb8 zQpAB7A~hwxbD-pip+2(OEGK4hfGhl_fcL5iG0uCQ%~ocAuey~U#LW94vr6M5f~`n zP}rT>oTKReqM%Y|hF@IZFnjAdRe*tYk?b!K!fX6NBSO{#dau`!|9OetK7DEvlhwN7v=) zI^ZAHMjR3T(e1)a1;m(GO+Pe3dUQeLCuRFoBv6-(k3vLGLOx^u1MxFDqZ#LESwVNe zTrL3cLQ{2YAaJ@N1GM< zy%;D7IOw3OqgHV|+Z=e5b@MTE_3b=Ql4FZ%SufRBq2W%G#34sGWR2B`AVDJfvG<7JCC~` zZ_R75?#Gmg9_aFzPxYI5ls?bXQEq+CF^y8+ct3=u9H?j6M2XoS3Rfx(f1?cdXXo$E z8oA+lC;`m|yP8PVeJE$pza%uZWms&N3H}CjjWjUKW*I(W{7UuZmUYz(#n~`xIFrWY z-NX-uCT9Le3e;u={OMN-d}n*xDBE6(1!gpWMSy<1?6mZa9x8-4EjC2H?@3g9el23l zxGUWX4?w^CXj|S8_+jh*Pvbo`Pqo{8%@BqR(uSQ5AC<$ea{0I%d=Gt2Rn?fdcJ7XO ziH9m41YWmc+AM01#+n}1WFgl~bf)g>jnR%;yF5ZBKiwZGykqi_(CzsRU1i_By9@Q1 zja+QkoBdon!17LlqfkSOwO#d}*~Wq-mCC&}amW%=0eI zWUHI6C2%N%g{;`B{vJ*I%0BT`DsPAv$`kk8I@^M^=10HJ)lAtCIqCjN?J(X z%zv9`1>&>M^@uDRxV1HoAv+-%l=SIdvHbLy>y+*866R{9l`GiEMl55AGA~Rm=M{d{ zy@I~vvmYBBLo!~i>ck4c;_XPPb*;8)Tb`RPqnkWE*~o(eJTPIgB|8(*mDt#q4-fZ- z77aF#)+V`*bBEFmh6#N}6^X`9mil?8mFmT0eCx|9IJ`A`^FZS7Z>vWlV$MYTj~!H= z?)uV`9x?uWsYc0WCYSqgqyM;?%b3_zn&`eV4>>oIka%$nQ=(sRDU~E{_M=r^rsG4t z4k=ZZAMj`M0>)Jisk3g`M6c^*AYbc_!}QCvw`9K$Q(jwWh2yqW9!2ORWK!4`p~1a2 zzFc~6i)`^4niYq+;^eG`94Beh=6o!yR_X3f>?a?hB660er^HkR_-f&fnX;}xN;fK( zc!`{KgsG0WoSQ@Q+=58W?m>t0$SX=78KyQlO~px?LqS_40cs=fmWfzTKni8KcXH*^ zyti+ajs=iVQPgS@89^YHcmzqz+NYfz#P}|e^lkfrPWny+%iYQ1(24CW@RtBRCIZiU zlu5|40d!Nm1p#`N|_ zc&c)B93`vq3u8&JL1k|64@dRh*Q!CIX$K}L-$bw}q$KdHpLB*AnC)yQ6h+_hdHYeM z`PXZQpfJWv#??`=tDFn2k|8XYcSh22hu$GsvMM5-$ zWOLY7!}KsiIb1Mau3KKi;QIj+d1$SK@M0@$^7*s?TV3M519Vb{?SlAmPsk@A_R_s7 z3R0s_H1oTdhYNsCJlC*qxutI`%pD@t#dDwkQaOyem!G6~t(J;=gXW3ZNDeGKR13>w z^BU&g@-dN25fw%PL7IMUV^23Ooa;o5wSavWzRPgjB!agf@y31zoVe$+3x0Mx?=F( zX1d}z+r~%twG4N*5_tNvTbzUPSg0X6D~<&wS5*=PcywIB{d?1zS2B2RfujgGZ4q8U zPiLp(Y2%vy4|8uB71tJQ3!jq&2oO9#aEIW*9fDhM_uvk}wIKv|hcphst#PMG0txQg zG!nFf2bV_P&b{xBJHGq9cjVvu*F6TksU^Est(voD1?mW-PgqZl?>jc2u#4C`j>Gme zi;tvihg4_X2SrM_E3En!8Vs8+ovzMF1=>f6)+ffA+E!`hGstGkqh>g>REJIM=3)9J zX(aXsXbfer1EJi<{%pP&<_faXdvBVAREwUh5YMpce>#6zsRLs>v7a?gF>UUY0ZOn} zT{GI@YJ1L961tL>duKaMzBAEhj&Z8H?tDHXweE+XO#f4mWzFyQB3Ni0EeCfxp*lBjwz^TFWwhcZ!(~~e1pk<<% zy6~z{k*lJ#7J)L0e}+8o-%2GhTKWzjZ!)Kv3v07Rjw#1$u%30{-X=*!^4DM8g$IOO zK;)7{1q(`xb;gN?h}M5m;u|++aw|`@ah=`C?HOf%x&s1oXn~1y=~&O&yAK{F=LXix zP@)RA)mpLLUtf44tt7OP8T(*T*TnwqA#^Zvx>Yh_V$|uiB2r{&suMr3@>& z+%gkts99$;kHVA(6G^0iY;vWKZLqKPQxb!R72J{bbH?XiK&BB`yCoy#aE~Ab``rzJinQCWg*RG{5C&#g*eC*b9fqG0}F)tyt(;!H*si zMJ{Gsv%7T6Vu;+LbFy2UGF{6?XXfW2dQ8z&oOv+W9x0ZcIgbLUO{Y!)dp8!@f|ep9 zSI!-E!v0dXS_8}ANaJV?u$u{tJ8S)iT;a?Nv4qL`^-CKe#LSA;wJADDZ0FY#(1iEa zvHW%t=-`p(hE!4wlGZxTVQ|pGl+zT%WBZ;)ITJE97!EW-5rOXuP1X{B%`|C~qYsgk zc{c2^qZ}0~dVX*QzIlZ!eH;h#dbkI%Luo=d)egx|f`)x^M)3&Ozn*}|u4#5pUQ*x;`)cT`lWBJRp)t z(aAaeDXDSc-K!KbvHWj8zQgk?LRbdbU0y4@@~qsOsB4u_>(*$mIJuju4BH2CCcc=G zhdda8@K+het32NR9joK1j1CPpBEq};Xl#KF^TPD2QL9vpo^*N?s z9Ft^*zVy))g?l@bzU1zAamxr7#mU0GoZe;fn+CdV*ok4jUV}neCXdhtwH4oucjRdh z+?Ihs{zPyL8S_a%2!D2b6jK3Z&58q@vAk~Ar5?$-JgK>M=+TNYWt8)C^34Bsp~R}0II{ile7MqjS9F$^s9CLff}MAh zv-_DJ&Vie=5P`=wMcW(<`l)g6NFq(dHneVgyp ze~#52@;zS7=4yFsl^8$JfVcitSl%wI?KjSh&C|MDG0EuefO**isTKCssEpG;Bwr+2 zcQP0>5*Y^}Toek$gMWg=Q0Q!>5>e=^&Ckzt8Bh+?4i&5&lDf^~qdJ)ej-Jqv$9wBp!w$@)<| zx%$33B{e2G^+>=q)5RoatF(k?aG~{aZwxJM-z-(@cf3dPRdf05X>VM^L>c)uR;J?Iz4qMooK=pALMfdhl`ZksJvPcJw zh6xa(+u%kiy@c)5M4X?MQgmLh*jr^0DI~3I)WkJ~>wf$af81gwGe?t-eS(j_kYp9M 
z*kv9027K{}sP@M5sC*no{&>*su}+ml8ham@sDL4sI9-5jhOd{QNXqzkYKK2=EX?80 zh8M+Lt~-`5F6}eLh3)qNK>Ig+!e37zIO*YJdyM(0*{OQo@BS4#`T^HA)ZFPejG|iO zz;e#qjHs1diKOpCfl1c94~)p@JblowG@`Q@dCdSV1Thaq@qa1d2hBjUDd&I13$vti zs4Mj;6|0WkZV9l9lYT+n8_h|wj$oyk%D%OD!mlQYgGAE?av_O>vrH+9azuiatObKG8M`d%Owl+pV`o_V{TglqqpeR!~XE4FNwk%D!et7{R- zMKgh7&x@5+JktZe@w`sz$J(0ol@KL6X-9{&cBYmy;f*q(hHXeZ$F<&FqUbGUz3h6j zSrCA1QO-M8N(4eG^3Wtkd}uRR(^%O#^?VhZ-+0~O2v=MFjR$$p6jILCw`NQueOG^! z@@Dzncg5?It_XhX>ER!XF!E!%hOjp9D275!7YEm5N_b4DKo7>wb09>OiR-hzD>gV8+$>HkLNY48%D~bNnG@j9 zu#;IPpSmgW#9z5{3fYgBVz%0aU++urZ25I^4w0p{>{**hEy?d1ywGF{NV+?|?YjKL zdbeA3(_USNqZJl`qi=m#-(+`#F7UIF)#N;5_;q02yJc)?=LeRs@ z@HX95mn8eWgdHu|RAbhr65$n?AEBp@ZWEvrLkb-1;@a%oOclD|@>9A!rQ45(n#^-O zymrw};z1~}E(b#A7@QJuY2hjY{atH}78c8KSp6knhk%%@)!H2G5wH{|T*CjZLo4ET z&A(Q_6GD5Pw&eMynGK*8K`G~C7=FY%lD{kfgC=7LS1n)CZ+>aSJmOpYN|dse)MZPI zf2Q2$Td6BIfe7w<5AaXF0Q!YR=rV#UjfIOIdB`mX;>f%I%c?c zmali79F5B&PS3ziyis2xUp@5IUgY&*9RrV}N0RI*7~{~xA&7Wkl_xLt$K@Y+tZD2h z5R+ELppQaEW%5}~fC3=e&dgXwr5EqL&P=gu@C3UdTDhli@el}qLiM}`^}Ok&ZGxA| zU{3c9jz_2z>@&^64f3)x-l%tRx zk}iH_`V-X8n=)i$4W4VFOlHl@sGJ**!rV+VsJd9-s>`_3`14NvRqTCUAG^( z(|KsybM=D&X{`&fmVPF!8ji#>76KWUhJMkCtpR)(rTnn=tE`#oHZ^TCHZkg?AoW7U zsNvOskklCOyWx3;4u_f@gBD&}V9?M!q;Ej*&Z$2>Q^XO6njEk-O7^wVZ3N^>7QvN^lv?P86Sj;PmQUhoH3#hZF%{8Oh`Qd6L(G1@LO z4A##-MLs919o1Q|IAe3QFA(6LKl~s6F5qSsx2gV|E&CIDU-%{THENLcO#TVt9@8aq zD1$c7UsY3K^pX9O%U;#KzRwDT>m$jN;>~`IpIbt0Knf7t-NhE9c3wtHMWP4|2b=ro z)!?)jx4Oc}=!NOBu)-95^e9~ZLU=tEvh}l`KofvEBxCiS15#8dOerf#SK9_++O$Mg z7)DV>Ef(=3x(9TpTZ0ToExvFVL*-L9xfBo8k^T?1QdEY%QWgfSL5(iqjz<_mhja{b zrApu>Q;H&70iSzXgC@^#xVZ0Qb9R@mHeqJ$BfzDj;g!l~s=XVaM$v#cj{oT|W+ktX zNR!SUBJ2y(98AY{jPa8z@I=DMG~vr6NS(I~iYf{`_g5l84xM z7A_h|lMy7uaE48v8q#DprNHwU^yRL8Yrae)D*K@y|Jri$gxWbw%#}9}6o|Tiq%h8P zflLY$Jnf`b-i|wQ%htW9bs64nuhjVzQxMNHq{=R1XmVmzxoqE4=%3B1b7pofdNF&1 zEZ6SwKyV6cma%8-{<(`*Y)mgNb$?bp0_Ohd{Vk6@SFHb7?5{ET;K&Kc>E zkfG(ZbV}Byo`-8aITo%O#8G6(|!aH`` zQ--QDP|h;|!*BzG}t0udeLsn zl~xz+52*`wQ>*(;t-o^mH*nXJZoJH`#PZAL{TJoH4cp~Frj=%_c>FpGo-q-Ji&p>g z0*wch7mN6A8S1DUa|KOp1K38p@dgZ--nr*F!V8x?+N1^)=}P<#gF!pDv8*9@n&zJ@ zNIoq0J4Frfp)$FH+o{5FZOg{^e7U;c*dMOk&ymA325DLPoQ1t*hn|^QlwuzfuEP>m zX3yJxVYuP7a|*t~-}4`V(U?J=-Yz@jUkxk8WiJ*L0|?Qw0fh_A7?>u7cqpx^+Sa)F zDl7fj}ORK8ky>?CrOj`FMOop;<=#XM{EoLDHG~i?d$jbYAXfx)GG_>hz+WXZ)NrsjriZ zsxq#xo~bpMU6JL?wiPg0cl9Afh=Is zFY1jokFfF7Rw<)J?A(Ao6@J_5SLm_UW-Nt0JQFuFK`UrDRCHyk`FKrn;sJ+UE@h;e z7UUpPm3mX1N)vKf@`n#qk9OUu)S($t-3YLgvoqx<+e zCvq7|9L!>@>U_D12E+dh2^OgVD5WFIUK%)NtA%ZwY81S8Rr5!Y)&rWE+B{`CG$94y z{<>jpvPcf)+~ZpBa)sckY>9R#^hXeV^yvo|OwyIRj0&Wl^s>A*5`&}`bY7hjzzy~3HxF>vo4JFmuSlNQ9+um4weFILZ1`i0U?Z6 z%lqv;U-M-mQv1;n5?z|wrKFOEwV&4A$aXw021&O0)z;BKU)csy{5f@iPuQ@G?d43o z%JKL+LM8RJY1B5H0k}5rRV)@_W7$RCI0oWNEwQHyXR=Xy)xN!vInjG{*(a*jc3Ah+?YzGx8OWhI8k2SE z&0d~-Z8o`djBX|O#aFQE$v6fT$Z!q^4_MNsTz zj)yqf3oS8M7Vx<_iLGxM{`wm!^05RdV?t2sSJwG`iFf*PiP^!qC~J!zsf+i z5u-X8A`VjyZ^OwVhTx&4RpKh|I&jGW8vD`(v@&JQzcNe^xFo7*JKs>#&sCc$R_bEk z;(RHm?E~|NUwCzx=nJkCEYmJ>6z#V}_#OdCpU)qN8WlpxL<=H=zKNIa`#Ebc(N8kc zvTM0HP2W%DMEaRrARNGV@J9@{vhy|4qyA%I?c<&4>s1rH0fF z{L?3%zJb?Y20a-^hl#?zBEg`7MU{2&B167lqZ5SJk$l2=WFlqec+V|r>qFg&3|$Vk70dXA=wTpExn z4!J0;Swe0(4uP@bey@DO4nOq86Fe^O>d(RKVMmUE;uDqp)-jtO=30^ht61uZtiw9V zVU2e7m*3;F*IDB=j;!w9Zwi6Fvq?(PVq6NjyrkE6#NbHz%eo)YPfw_uX|_h6YgPg! 
z8k;R+PMC*;+{XIW9Y40Y_snhrL!HF^qWep3cf^*~a_vR7%4!hjf^bO(A$Tak`u>@KnBWh2064Q6O?JG=&858rPPt5N7Q>GB zye&A|H}H4q_e5DhBuMPCLL6mZo}y%DT-ql${-1mTkJpe3hCbMi>jnAav@;mz>y?~) zRq+Zv>TvQ7Px%upzQPdzvuC6mD96wd5nmfS4jMI640DyWS*H1C72Fg806G)c6SBjRh3Yf* z0*84lBlZIsS>Cza+mrNln{6y0r&884>Z+&8vhCR{&wpA7vi&|DG+SY)4+M@B!HbNa zGTZZD6U9AXAy7Af*50)oH1S^f$LFo1rQcsvDg|Szm~`!M+7ZT_IB7Usn1Oy3kwblz zduoy;nt>(@(q9AOf9K-HvfgBPK0VVPX-Be>x<+&LkHUP+O+5FR2U_r(IF-AiIPvjeoXf@Av^lUQj1FzzT8(s;A? zi=jf$3Cy!uRhP-ibY{QaaP?bw57wApHb-0lrcFv+kGW66f=`juZ;_vKl1HhIaU4p0)%d>oa z@#=oPDw?Hc@SIbYvJ6+R!-~uPN{#}l?Nv2ks`UOzezgI&VkeGKhcnJ$JYKZLw>W0$ zv@_fJ+GQ3R*CJLLh{m1XEYDd{w?S<=e*#F`NL5r%7PG4FOY36-G?gNnP0^kelcr)@ zZ#lTUsB1YuT6vXdk3$YHT6by;)uk+&FfTEpLtc*Cex|&9nSsQ)F&;B(E7!uIv5F}5 zKGGQs*eKgA#Koc^&*DDy6K|CB4u4{s((CjmCQ>3EaHQ~J!u7=tpL-e5y`^O{M-$V5 zgk3RoyDq8;IMpvMjlSIa70XTwjMAL5<^?QaJ`GsW+&KF52R%q&$r-froC2mZU;;^!zgTzFmF zJa?f&oOD&cyoL#aVuoFV%jY-OJ}c#8Q9vAV`5H!@qC^VAhRHMDp_}uV6n)ms>^W+uho~%-B0vTV&{3^H*Ox|7+2xv;tJ*u^ zzPgTa@FEG17g~lT(|3~{QQ4oP&(%7pb4fH<*bda<-&~eYt$p6oz;Ii=^Ln$AyX!BP zREj#utor^*m}R1VlUm2V7kLXu{I;g{KDN40y(yv)KHmgbMgF` z5N^c{L9?nGeMS;_m{VK?k0mgK&>oFCvn>&k-Ej{a%{w1M59Ui5nUSUvCc2v zVwx#DNMU@e;F&*E%HZ%AHEI{#qE%9ba|$j^SbA2Dtr%|I}=>h|3LaDB}^#ucekb@e7CSbk|g-U?yKI$FJGRYwt9Z)~LV9p2rMH zkuQ*FJQ}u3jh3Nm`p{Lecv559Xxgg%jlYRTCcCR&qij4!=cLaL^il+UAWDAW4=|RZ zjJ!4}5j~bxk^=Pwg}M}O7|(!O5U8_FQMxppWliV5(L}e$;u5&bB~xpgH-7c$~a4F{Nam zzp7N0H7o;X#gWm?qThX!j*iBnN6%3wmAfRi3PIgr+!-Wo^KjEi^s5f*Trgaa9Ebb) zW*@PsuPHW_P;Gm`9c?904v<1;MAS__g#+X}SKEjy54M5C72gBEDb2XOJ}F1gM(m5c zQwRc!nMv5%$->*E$>h(zaE*59M-)&0I2W!Qb$WAmTZmOsF@V2*Z`h*Hk~m7BM~=Jx7RoCfOH0u?Q-cj zCsresvkVn9n4&+?{WMQ6-NEaA3shUyPEn0sE+cDJ+~^HFM!$4t9bGrsYYy8|h@T0M z6w6pxemmhDR4$3x>FXodZA$_jgJS%Ra@CHBO&)sXAy2Q&Q*e*xvOOJ&-#SH(?96xa z5aoXGjn%EgB|i=foaqQJ)|-z%mrux3AHQV-9jt77Gp_v&M?N^~y*&>4Q=iDrNNG`* zS8%n}G<>=_k;w2gU3NXqW&HCnr5$kDVpdUnDf57AQrsJwb8L(JiW36@M{}q9!`KEr zbj?!H68=tWmhZ2_b~S}kQ{;+gK9F`cl4*6uDheS4C_ zZ9x6G`0&snHx@ME(rnQ}|LFMfe3b52;Jw4%6K2tHX~N%D_vJ@IQ*<;dtXfr=YKO}u z68F5j79za6?clK|jD6;JFtU5iw-bi8RMS(0@FFE}OIgzT8%g(59k~deL3ikPIVuX3Q$keFYOcWXuJm)PHWMilK%s(`B$&M}9`gY&ttDV~pN%)q4=M*8M%| zsheQ5#T~H_V!B)(mAM8VlXXjph9spewsYHXWKVAPzNOkB)Une03-5`4vZrB7Ue|Ux z+Y)FFz>NX-YWIb5=QMkm?u%SqO%?vtyI?G>#zyu&8VcYF#-Qp?zBoC@mSlJc6iqm3 zA1HJhNtLkE%E?5<#vxM}HQ08vMDm{xe$nqpu###wX?(#mMrE(8;LMGpF&yOT!DFe*`~@#M z*!x@^U3)N&9x|Z=o$Vc@Cf^;nMlY22XhuPV)ySKbQmgxXBatl+na9NbD3XPf+?JE46 ztECAb9S8R)&anMYW!DQLc_Fw8*R>7kQTP_h>wV08XO*vz9IfK~#~C zWPh!zaj*3x4)^k>;WSiqb;GnhCCsD&C?$FlU4rT^BcZ&jyGW$oDN`m@Rv6n{ z7=54ll$)dK5L4r$2h>CFg7*Teyd&)KW^WE!n7fe8*Y=P%+9z494M*WXE|d}>q*nXf z%_>}Js#i?F+5@1Xu`ZTprHQK192TbRtlYblsSfgNh?%XvOp}7zf0VaCE!IO}>ND@L zMypkbhqMZ6jW`&nm(2;W7JdGJxAcyMk$ntqZ%#hsdhg;l!%d!W`Z8Q_9KuJ%i^Kjz7p%!b(bO8a)|hU- zJ(AJBTWokZd!kj>%HXYGfmk~OVmJVCn?L+oBBn>1(^T}eOC&X3gMm-Y4VcsUDtPFp z@YjJ9qGXt~7&|xlm{)&oH+Rq!FWb=DoQ-5-(4kZ0SG%c`ftctK<3zXl8!u748- zJLb5Ub(@lSgHF9{#o1o}3>o<~e>3W$N!U^dT5+soxBdF$`1qoJcF~J$3m|)(9WVZ8 zjqDLQ*^a#eFVZMiAZ3r+TA@skE$71xNal~wfj6Dza+W3_-RY77Ru*j+5+<>bcOB%6fgUa3}-To@2RcCZf zPns~fl?)gXW&rvxT`%~+&JuGFMbW^Uh6X3v;Y)L?2^2`x=k^RFkL)*<87UQz3Yb@t zOW<8`(!TP^TM{}Gl71!iyLzqOqTRwn8sAymVO8bmaRpvVo(bd5v#Eu;knS9niyRfQ z`AjY|HN02cC>*m@x@ki!d>m}e?&rdHIiYw{eO-ixpH#EChV6wMzW;#o`_{B-{x zj;ITbOF@cAas#m@42T^t2rA5RZ{A26hySq)<0gdbL6!?79_wG+CVUo7eYwLNIR-{X zvY9}K9QV}rZRhL8`*+7f1=hwWi8Hn)?o~hg{Gf_`@C>+^RBg$+3H-$FsXui81|<_W z^7>cj+lU)eSe;uH+#`J4Scl;UjU;c|SD)RNv+E?YU5a$fo0BKcxpR^;BwVJICQfFJ z%$I(X(wiOUm-#mqS2cP%sXsRSwG&bJO!ntfHx1bc9z^8xHc1v5r@4!QUUitezKZ$twgK*~MC{S&`0;rdEe$F$I#P|wMF>#wK+uTm4L 
zm3-1Fz9M*#nu!MbB(GXfe5$Kg1Eb&ba8ml}&)xP>8j$$FmpQHg?Y2QSngfIxFN#*! zx}`pQlV{F4sjXhOi@r`k({QoH<%&Ojbdto-9oMD&!Sck}BXW89oUOXqpjU>!XMALd z?8L4IHCUK*%dH;NRI1809GQkbBOeP1m!B({Lf8x&$+Y3;+BO}Yma}EcEO=M?1lg*$%Z zU)yRxov=0hm^1F_lbv8l5e>zDo$W{J%B2CNsAsDm-JfbFMj(ed|N<5VTszK&xJeFYGmNhTLB8z~nlN{|lxXkbv(`{YL^L(D^q$bB?mBarhcNNEIn)qvZQEK?Wux zlqn3H<&1}4wQ*Il$YmOiqmmym%%8=SL6u~mBNjRihZlqlVJbs^UJ&*(bV-=^YnS9F zi%e<;R7jyL^hEg3$^_L7d9y1}FeD>s zgQXp>t3EZxZ3CY=Odex5r|Vb|35ecU+KYt!WO^b9CVW1+Nd1jzm&Ksrk0Eb))7XEB z$Kd=`11Z_e5qQ?r%z(&YWPN|qG7oOQ(=SszVXJrn!O{|U|V35lhoR= zyA$(hVc%+=32gbK`9(TcQn?Z$#(0fdJ=>1*M>R6I0m#&7k%k?&aon+Y-E*d)2IG&f z4?t2Is-f#C`ec|ZL*l-?N$vf~;ot-R$xT)Oo8^I_gJCzNd#2BI^=wa}5{`@;ymo__E*4VhunqY$OEn~b z3i?SLEW|Q3Jb`y_GWU~N5wSStIEj-z3%lMk<7`sG$*~UzS~QYS;B4@O#MV{poI%>0~w00Nlk@)?xIu6jv?p zauyAh|DQdLx3ZGd{y}#SsJvW3?PI*BO@pT5i+pL6)%N?s!#NMd`aa%VKLO(*G(PV= zR;c2ngO_3Z!cklt1p-ImFD%eREBNgF7sw(RB5deY^!YdL%%JhWG&c;@G_H1Mu|@+AK{b7iT0F4!|IWjc_f1w zFPm6L`y0coeiaN^KNR?F*_0aBW|M>4ONFcl$F?5@U~JU)q<^fYQ987`c7_wDD`Iuz zM){w92#j+Z+szhhaIV^^)mW&t)CHtyh%WEDwZrYw1Y-5Ga;IU;NSp9*V%V11yh=)7wMM|bBI_019MyoLnWyMz|;EUdI@qg(0*rzJp_E$g<$O|b|2e%?6ZK={> z2Uo2UB>V~RqkqjnKz*j@dnPM15nq7n2Wi-E# z5WDNl6b1~d03pt6>&9U!q&GCLAQWx|3u)t7pwa)>H>-o@-LC59)5jQZ#FYqetr!stQ+kS%b~R!W zZrdu*<(W;r!B|@*XR}TR^ZsO3(=K`57un{mxguzwR06C48)S4s+47{3*?9Q{FTeyV zG64C-z_`UD)riHQDoDH}*L2>K7^+KObl3|37hOz63>se7#cK0RM!jPf4vbeH%^fQZ z^w4|F4x7X$m+E3H>z)-5-E?(5mbKNB%BZqo*f;_YS8qQ;HVl_!dO-Wz+q>0gl@;*5 zrMsRs|2z!ct|gY?3LW5Dk*5106>B1hvTJG4znL4dSjWj1mNvwWQIA zu{JNC@AASg}^_cjFrgOcuwwR`&{Q_Hj2aJCp$-%{U&slQ4ipehdFk zxm~*AvdbSyEn+3OBtz zxAiLt(>6^JjCum*W|eEu@M|V%!@O^DF4&SajzmyMq%z=y%DeRrb`-HB8DmC`ldeQK z+-iVl8|2VUvG%?x@=Yr{%_S0Q_+M}%-cJ*wtNC)IvA}xuC7VXeA#Ek;&mS=n%1lPP ztt4BuNaE@62Q@R@*5E1simF|jSb6s|bkOy8v$5hJ%PWFba|BC+{yTFX+>MHIDU?C> z=9CnEU|kFcX4vhtxpvtyrp(5GCajA@ylj0M8cXiF zC;WfIAwUQyV^XoF5kThLgR5Ek24&ACynG#WrM*YUEn_Ny(^`*OK@|n!Z(h8kSZo`W zz-8LRo$Os_@)`kAhCB(1V$=N>)DS0juFc8@8NLVS1oPe5dS-AMJOW@;HcXRgRwb4M zUopM%@a7VUgolygJywJp?5`=R-h4k7=Alb;WKFh6DYjH9!(ZLNA7xFq*>E2LT%$++ z+gn}Eh?WT_j`@MEcc5uF*=@kd%ijrfCFYybFB&dGEugs-QK(x`w&)KXJ)+CV4vw&8 zBJt`uQUt48hN;9N#|Axo7&Iw^j)KHJ?I5Bp{W14mvgPj1+~if%@F(gJN}c>)NZ8c* zz9^}zxkCvbwKU40xcijeA8b7x_1|v3zaZV>Dql1!3C6SMFWCKiIr^zk!2OQ?ACpC1 z(QDzg(@ylK&0y}0=9RN;6>4Pafeu;(7^F8}o`=VCc)>IYw4og@nuTH61oMNA;O6bJ zY{PEj7?=ipFfh3Qq`GawU>uiZPc9&WZ&O|Jq71z9OLnK`}RuwyO zE;uFlZ{{pEInZ|Nw1&jbUx>JumF?nKbTR^M`#zfqNE=S6^hFlNpixVE8yY&siXLM3 zD-4Kk>{htDq+@fB_J*r`>j0Z&Ca7c=DQeG@9!aMLhDFqC3{+|ZcsV^-e}khJAmf*v zkEjrUR&uSWdhjOOFW?9zz37W1lgaD9XzN6%SAzmRj4#4XO17vRoqi`|N;oUqH{-28iu`UwGKY5MFUC}F z&EEu4SB2t?{SRg{Q}I?Y(VZI4!mx(ZURvewo0z8hhSTad9EclCHv zOPHb@LfIJiMM!T|HY{tyF!E8m%=DrVQDRe0(ht8ItuEsi%OP(;kAuz6E zjq5%sKRgJz6TbA}r%AGDQtSOh=by+0Nh$!ISUc3ye24X|qbYizrm`(-RIBW9-B3}8 zx5#?-onC0feqa0a{-p{>jH#Y;&Z0nIpvgDEq#q;kw3AEBe?gt$^yOV`W z?UK6_y2H4rThX%kc=Nv*;qRct%{nqBi6Mdc+}|l}6xrd2#?#))gN(!NHftyKz9zB( zd+oAoBPG7;)i_eht2z6Ub%&CwEx*l8>^kyZ$dU>0fDxr**EGZK&olXVx)1&*{kGe& zcxZmeoqsL27EGY9fhyYfSNqVm!V?eM1*CI=O0d2oZ>n>d1!?m?6E;MCqy2g^8Kj()Z_ZF#>eeW)CAhCJb!)Z;dsd5mYq9l~2o&Q_$dWJ7G z$i2BVKaj|2%HefVJb?pt!aFgQR;EWn3DxgV0RsWKT(?*vy@TD!YG|pH|97^iwpxUDP6;%`cJHc_-c^V{f z#bLh)_0}2+C@CdUQK} z#mS+&!_k=>0JXb0;$BGJCyoqBJ7t4^dE}X%3itKH)Y(H-GBe1)`Z;s69Pa(?6DH@z zfS3J{2QP?$wZu9Ag}5I`i08)B+CvH)4&#gzXgW@COyxg?o1<|fZeJHW9u(ZG+^_OlG?_wwMNr_IZT+V_czaGfrUh*HZOL^vicmKhWfmHlFEXEMpQ&2~-hX7L;qM zx|;KX(oXL-&zC;KR3JD(7uNE|RUU)Wbb|7h!32%;N0@rKesqXc4A7WYSO&W#vl=Au z{9W_em7MMT|Wo0g_I zcYNc`RZ0(%yrnQWolf}p1z0A@ySQNPIG10(TJH%l!1ng3NnA3uNd)4Yn@;iC?3mCa zzxEbTI*=g1^GPtN2;tNHkD19Y#wQ%4 
zW+hHB82eGb39=w0r7`8VGha&8NdDj6^D<8C>=jm@t^r*pOo z+Sn>*(dTmU5ZvL$o*k(2@D|(te004D*=!w~H@KFgrd`@<=3aQ(4 z%v36@8F^8<=#Xu1zK?sn{)3wmZ;{`%OT)Och4z)PwBF4dq0yA(DXLh_2Rs6<*Y$=Z0|Zy^Ucom-AHWg^XW=+?4<_k`z0yhBD~&ufls*Qv^>pkh0C+i z_I9Mri*>frX?1yW(wk_*_cf4=DjYBN8qcgj{ROE_<6o_ksJ4^|X#oHFY{=b1l+QQ8 zZvQyerLRHnFwS_BxFAAFW0<#btOhZ-^rP&MGv@H`a{*C1f%Xn?gK0KuB-DMOcN~^Y zzCryH(jQ;ndI&8cbNbtUGkLgk7%5`V@ai2BcLHtnx(3PF4wv;J^X(O#`=aN>vQ> zSGXrP^}A{S5Ww=ai%1I_rhY>V<08+jTGL-ss!1EH#b&IXM2er3`7`wy8+6?;w27=Q z#Tqz-;(sF%VVqJrS0g!6ikpM2BCgBlG`GpG3Tq@Hk~Qf5k1(3VUo@i&L(3`BW{jI%QQ7 zOsWd>`tT3F%koZ+A(LU!yQX}LTpkc1&v?d}RPFz$1tXegX*Jeg5yU`O-A-4<))FMf zzgOH#Ts!N5g=R%|OV;D5MJx$ht|evC%3rf3Pdt z?ex!mlGnfeN$Nu6FQf!cd_(YX5WPGq0R-%KEbGJ+;opdRYps@Z4#gSE8yZV3noj8H zjcSfRNx|0~2ePbaTJQi)fK8AXQ6gR8S+fg%op#}g2WCs6y{kbzb75vE&p_AZ9S#4k zv&+v2BQ%jyD#?>lnxCAk*cxCv;Ibpr-WRJsb>m+r#+@pDLAK(*7T&pR9WnBeY84bu z7NRh5eM;WKN}<0;#4J}SJvjjVo-)p%N#B0?e>6)f{z~#^^; zfJF3k>oOGeM$_ac^u`Roe=m5s9coe=Y1DI3aenMraT!69OU^`)?jK4608lphI%83Y ztqwkG67RBqg~Ip#H$eOmPsE~|re6KtIKQVbydFd;fg}%FzfO?XlJR`g-GQ#dtD!TT z&@*JrrJ41zQd-0f7V@!r1r-xVs$bs$x*3Lz@Rm6*O)!hR)0V&UmCq7l-W~}!&>_yu z4wKT?N%KOsy3=AEYmXq8BFE!y|I77^#wnL^Eu_hMRl6bF#w)}u#`X_9?^+Mis3FKr zQ`WKiE6-o)9KcR%E^8k&56FoS!t1onx2Q0gWl8e&@}k$5%&iFo*z_tbM$FbR`y*!{ zP|2cifWy&e_jY$y5rI_oYq3_Nvim92{_N*<(RryEQz2A~r$JAvtoKW3WfP_!pS;>EpH*iPtAn^cgeNqa-`?T9+z^KvF zC|3}n^x(x$MxWk54!SVjWv5o(M++1b?E%h6X@hg zm@R=A2Eybmy>b(Q6OnnVYOc1Md;PrMysSwQM1AR8eF3K}90zKz{3ZPNn>XwFe;>8% z$9)g{DMMLKL{pvhQ!aVi&%H6aUK4Q$fnbsMOu4O3I8`h;_1emcz^)+w9L|L~TF3h` z>kMfZc&8d~stbP9SD0nu%7e$0>ngT~GSaxI#If0wC#(mEgh;FF16gUurJ1v()@C#k ztI3%(GhVgWWUYQK325O-e3LwWm~EGOxC7IcI``i-VX1j~Cz{HEpt^tq&5PzX=!O}i zp*Zd%$bQ?xt=syzyvpj&emK*tt#QV!dOX)6T&&Zoot(itOzsf@T!U_kub7rbL9~MT zj!%+?a4$8(VGO;B_WI((_?OhZmtEZ>5Y=#DnzYx?(QpmIvc&V?jyF~T1AuX@s1(GS z;Xk+Y#6h1|U55iNTjN^k&SuohVewpGw99>iPA&^rv%GQ!_Y9`QpSRn1mi;?QyBhwR z{l@5&GI5^m*FegC;JV_aCo_6rSLCZO6GgUuP zJSsWHRtx}GJ?8;8q)Gu`ysNfZXWy9;!A`{xRImHPG`O|4wACNx+!DlQElwX!ULuoh zJ`L9FJjauN+Q2Sta<_ONZt$99SG$BU7nB83O?Yu;9ijn#%YH#szA7S=9L=S_hK&(b z7RsIfyw3rpOTs;gbXAC>C>8&?u{AngU%&``kpX~)>Q}4N9Oi)R4}5jjES3*XJ9l)Rqlv}DjY;fB;QeMY@S6BG52d}42L zr#+~6k-X=T`6JXhR@V}LJu6aX0J>Ox+g97z09&G2AHlRp{r94L*G2+Fo{{4S#@dG= zlg=CC$OvSn-69jp<}l7q^6TXIUcbWbYVXonhLkpzl6`m6^FyhWc}cL>Bbo*7Q<2zL zT>cxbAR|(Ost6yA#;Ns8o`xh+74uQ$n|Gb8fLihJ_KK1C?Z*##a3|%xq_X0bvZ*Nt z9VpC0c)4Jb+f3#WJ2qujDwsN|U|@dPfm;;xwl@dn0$odAHtl^)CoM|@s-eP81$F&1 zp4sM6ONaN!0DrY?X=$$6h^9ryAFB+(Konx(eTS`gFlB1C(nJ$UeY}l}PXkCOj)N3u z{Tfr)x6=Qs=<^Mf$MC*)n#eC}WclOVZ`(<8ohPwL`?Ua*|CCKD56N6w9v7#S-UF}He>x+~!>ow1ogH!cj zGhuLHa~z9E+QR}TX|E;a20Sob71-5_QU#P)ht)Ge7aru8)^XQX-285-)k?J=33B%T ztz=@^%oCyA4ew2Cqd80~sVqz;Z9h|+u_aDajM(QgX?D=EPArY({~J*nd}}W*RNG&? 
z(k!@hh;mKp&`R(mv49H(5+3Pv#&2U3L-zr0!3|O<3eS8MGxrxdf0RLm71{TPHswoM z{lg>B_wk>F8lu2A(YHn%lL$5Z7{68RN{nc)9ig`X z=-Ce(TwC1xTcEcxi1nFej9qZ`mRIo-%midV&eAb|z;e&_sFmrK;Lea3awhX6@LGa% zK3arHfsq%=?Xmcpa4D}?tM-D|;s(!1kQmmh^Y`np#G``|h65xR_$&6vzUAIJ=~seC zu%I7|@k)5rcF=N!>$mhOqrXOGw?T51931S`mrfr^0B%^=n*@0LiYeCD7G{C}U@|iF z<6$?>b2d*-!EU&6ghv5NMSOt&HSW_BgdKBSf@K3IiX8?4&C$bWH^nU!*WlVAmeMs``=>2?iAUv)&c*WcSv-Ugmya;L?28-vg${1FFjKoIo+Dq`hJZvN8Qpl^LH)` z5f3sRzlr~yFh~EOX;`IA&Aj^T(;|Sl3|vW%T9AxoBInJkfI43jOx*dtrN zd(P>1UBA<}&UJqOfBofh-Ja*U*XO=J&-?Rw3(1!`jec%_2aTyV*D2egxb9Y@c{--r zZdtMi6Yo3lT0RobNL#+!CV8^=oCrCTe@;|@{>^+4$hYaLdNB-e4WrLxlbJ-L#4h_9 zS-4%bi>a3EYNyTAI;_G~t$U#1c1yPP&oU4AjP8uSVU= zq$Gk6JR24=W?;5+cC}M}(#b*fSnA4dB1!x9gIgd`R1X-Y z5%=_L`>t>>x!(e@t}YBOvn^1c5C&W9UM*(t(<&O@U^but&gX=}mwBWcV}H%WmZCv~ zDx=M3Q!D54pNFnc?L#j@)1@%O!wzegTt^RjVMz$eL`Dpb*Q0Mh5no!d%&zPb=)x7mkP z#5!(~Py8Xk5-~~6?#Vqk_a))C^B2rufd5jP?Y~;uL2@9rlQZ5}f0=Jf8N#pdjh#H8 zYlUiWNsV?o0?R|Y3HBFS4nkor*6N6W#nGHNy`*lc3F2VmbrUgX#Nb%T`>A`pCz)tR z2R@DRCZs5?)I@m*0|b$2Vl^H!;-OZm`-VyuMGd^DHF<`1|C)NjNvTbSCR*-y-^fRT zzU~|bIyj$?Y2AxHSA$S;3Na+UW&KNB-Owh+I4_QL# z-ludBcyMCyr-R`x8|qKXmOh81ns_vhiz0T*Dk}WxO)(mpN#BVOrI`o!xq)q6QmTZ! zG9{&9kq+O9;E`uO)N1f9QPl$ow(d8$t^`d6Z_HAim@MIgcls5TiRYrWvp`_3Hyns{ z{#s0rylupt7|sF|5HHqOQkGc85~r{LfD5xonzZw|T4B3Gk5q)~9dPV>@qBN$?OHJ@ z>8XDRf?wtws)?m7+5C&&wncQ-@f%*(<2yiXjk<$dJ1!Op9DUnfvP<-3TKda?B!XSZ zB&PwP`#1BIwq|#HB!*00Jb!lxyPx_U04%+$5q4`<74g)Av_H#;(D9K!9xw(!H^Ue0 zMW7IkfnqODSG6EbPWKQQaj*_deFhb_yehC8%xs{3^VFQU4kou$mo-jk33~>CZx$J1 z;+XVqezaTlvBD>_eQUrj4J9_%P&Mza{iK~P-_SM?{;c@kf)c^cE$l&W7W_PVwX5sU z1^le++I`jaeofct^uz$jq*d|wNd%|t-Z5=UK0#*S%35TZC-|sm+n!W?1(E|{hfY=w z#eq)*ONiD*5F1(-y?BXGG?Ifn&rgs58LI~+_2(AW_Q4QGr8VOoJx4p#e8=v7wu7xo zE*IRt|Je}z>dQbtMI{CzO@!ZI^7Z1-F?Z&@#boV$>)Y-IZ%JH{)YWpcXdxkf@>dXE zhlO%v*)TEhF|yaTRrzQ;`hL|Mp+>sS@T!1_Xg>%lChw4rw*vP?>nIh0zo}3;*n`Gim^R>)Ij)(;&pqkC9@VNKv4IO@VI@vazlL zt3@70%5>Ss*JD0O@d}UA9>+{Zn&d4V5uyeKHiQmR)3*2`tXkB*pDOCHTp3#^Mh_l~ zBQV82K3B-8BRY$+2stI*+88(0p{DJwQiF)ymMn-g;wL(7w;X&`-vDCvi-`BhWy>fH zjWuc$YAxAf5I& z`@8^S-&dEO#Lm)i{ss_UDsqrT^xIGTZmryG~XVI#K9gm3<$Mg z*HER0lx|NFIAv&(?}kE;rdU+oHBi5$F{m+|GQjz2^(Hu5?e}CM(3}uRMVWj6NI;m= zVUUUL{t|@83B?2p<9xG|Q;bgIx?&H)L%YD2iD>^-aQ+NWK1o(X59GJ0FxXN&P}`}{ zKr6R1{`m&`R;OCT7VxJ zJC2=Ki|kzn#*jS+8iYJ=-Ch-NS?tYg2Z1}ZaGha*9m|VJ_xX<*OCyp6A+l5NF#DnQ zZ39fjjJ@s2g&pnl%CXUH*^&pd#2lW|SyeUnJryeZ=YnKSsWTbKgs|e8=Jp zRQYn`<_C{KS4)?|pTH4Jf=N{eR{-Rx-Jzu0M;hdMmv7IIqd6n+Y8IK-{Seji)jT(* zIE5+hTTUsFWFB3e(1(H<--Bdl$QpYZx`Q}Vp%AzZvj-!a3|veD_JNiu-f+;$N=#=3 zi40`TCFxzX5$3Vsi$i;@HBDB6w>wrg3&s7%P*HtQSvYF6D$H=s0c0|0 zP)w)sb%JYWUoW?^s4`){drn*L?uaDFGQ5n_Veo zt)vQwT-q9)8l|l$WrXGV3guL<5!Ug$-Zcv`n7Mn8IGP)^tPMPm>D*CX#=geg3w2ovWD#o6HLnm$Zl=u$Vq(A#tTX{lwHR z|7~BZgrAw$8rMPFhcxn4^PR9tp5mQcWGoc8Ze}&20XZ1na76|3$)(PkC781rIe=D>xha+!T^<2d!M za^*R~ecVS#?q=~}Em7D}YkZ@e?(32_a~29%gOWIWkn9JpE7aH|;Jg=HO@=c*2 z8QQ5xCe(x}O()qtWiDov_guS0)$-X}6WS}4Z^-` zO<)Ve0)`83GIHV-*%2pt`RI{5w~5^HOVcK&aYBVB8hdr7j7MeWX0X3E` z(cy!*cJ6i4b6}>mpUw8pi*u-?0y7NHK+j11R9w@v z^148C_d}qy#d8)8+88f`&~kVURAR-_b=`Qb_Q_1k!VKsZQ#un~2Gbm7@S&{$2ywdd zFCpTG41)KDyhZvD4AC=~^*_JP-vWq;u!S=`x11G^nDI-GU>I(T#0*T7?jpu_AJMQt zk3T#Gcx3k{Ww~@P{tB6~<<#K()ReKjQzl>yozFhcBrgt-aPg>Yj$VoGfWRu!&KlO8 zOGo$IJwqVWd4MMwe5ZB4Ril&WD_z7c#Y8Yt9j97dNtSUmEZh>by6*|V9UYtR)<7#_ z%&_o<_i?@a5$kqJdJuc3(ngbY33yGS-~PsgLruM7K&1?Ugizs0rM=Bh{pSxxcS9?l zUg38dvq2Q!9THx%8j70YT_ghK>dXD3axE_wj}>06|7!mjNS)P|43b*Qw@^?qIXLF_3>R%uPuN+X`iB_P%jkt+ z^9YaHaHJa>=Vxl2Pm+3gI(cV zG;GXBsP^0I;Zk(6(o%zA-o*s*xsSH71}+I#fwhG~k-o~6Fmed*_04ST0;B6jQdj$* z4u+6oVAp#Za7h?j0dziFs#eCHF3s(`Jwx_ 
z+-_8i9$x&*4In+7YjdsdTOKga#nuR^dO|xAk8cUwAEBBhKE%<_#dvigcaK72Z5R+B z4JpHdp46ZzpoKJGG7BXmC+t`+G5#MW{6E`g1q0fU-6U^JX(_?%rI*xFCg&&>N6 z)yh+^aUzjED+~{z0ZKs4T)yT9y@mpWtUW(3_o!YOzEE04)!?8S_CPTXa(WOh?4eX) z@A>e`90qOa^hgrJs1}E^q6h7zs_Hgy^4D~KcJ2SXZGbqPu{zx8>pbcJY?OGSV!$Mkn72L( zDLgzvyk_LU=fJ!Hvs6s@$#eeXS`-$(=9k4~C_dA@Q8m?WL9w#7l~V>Y>TmY0mu!0P zz0&@5t^anln>;e%9mBPwzigwwmF8d;wdUZ>*GK-UMgLwJ^3b>eB6VjKb?E2q|3m3> zkR*SNYsH!^zx}kgek|>O&V)9oD!;4a^1p0ZL4 z&;LIV^a#T#BZ2=m{EuTP;J*$3t3mPq9`ONVgxytzJ08_6zgW|L9IcQTFsGhMiTv9+ zKhOMtH2MQ1-`-`x@3l-11(jn{!2EMtq|<^rF>)N!_+BS7Zkce4{w3ufo9gdX z-n-HOp?u5V75S_0AP>N;;-0lY|LF3@s{tuDOF~9%- literal 0 HcmV?d00001 From 9981a1d905590bb300a0f3c547d8c9cd1e251ddd Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Mon, 25 Nov 2024 10:27:31 -0500 Subject: [PATCH 225/386] Adding endpoint creation validation for all task types to remaining services (#115020) * Adding endpoint creation validation for all task types to remaining services * Update Cohere IT tests for rerank validation * Adding missing import * Update docs/changelog/115020.yaml * Fixing GoogleVertex tests after merge from upstream --------- Co-authored-by: Elastic Machine --- docs/changelog/115020.yaml | 5 ++ .../qa/mixed/CohereServiceMixedIT.java | 1 + .../application/CohereServiceUpgradeIT.java | 2 + .../AlibabaCloudSearchService.java | 76 ++++++------------- .../amazonbedrock/AmazonBedrockService.java | 64 ++++++---------- .../services/anthropic/AnthropicService.java | 7 ++ .../azureopenai/AzureOpenAiService.java | 62 ++++++--------- .../services/cohere/CohereService.java | 48 ++++++------ .../googlevertexai/GoogleVertexAiService.java | 57 +++++--------- .../ibmwatsonx/IbmWatsonxService.java | 46 +++++------ .../AlibabaCloudSearchServiceTests.java | 40 ++++++++++ .../AmazonBedrockServiceTests.java | 74 ++++++++++++++++++ .../azureopenai/AzureOpenAiServiceTests.java | 47 ++++++++++++ .../services/cohere/CohereServiceTests.java | 44 +++++++++++ .../GoogleVertexAiServiceTests.java | 34 +++++++++ .../GoogleVertexAiEmbeddingsModelTests.java | 33 +++++++- .../ibmwatsonx/IbmWatsonxServiceTests.java | 51 +++++++++++++ 17 files changed, 473 insertions(+), 218 deletions(-) create mode 100644 docs/changelog/115020.yaml diff --git a/docs/changelog/115020.yaml b/docs/changelog/115020.yaml new file mode 100644 index 0000000000000..2b0aefafea507 --- /dev/null +++ b/docs/changelog/115020.yaml @@ -0,0 +1,5 @@ +pr: 115020 +summary: Adding endpoint creation validation for all task types to remaining services +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index 8cb37ad645358..c16271ed44083 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -135,6 +135,7 @@ public void testRerank() throws IOException { final String inferenceId = "mixed-cluster-rerank"; + cohereRerankServer.enqueue(new MockResponse().setResponseCode(200).setBody(rerankResponse())); put(inferenceId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); assertRerank(inferenceId); diff --git 
diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java
index 32969ffd1d112..0acbc148515bd 100644
--- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java
+++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java
@@ -201,6 +201,7 @@ public void testRerank() throws IOException {
         var testTaskType = TaskType.RERANK;

         if (isOldCluster()) {
+            cohereRerankServer.enqueue(new MockResponse().setResponseCode(200).setBody(rerankResponse()));
             put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), testTaskType);
             var configs = (List<Map<String, Object>>) get(testTaskType, oldClusterId).get(old_cluster_endpoint_identifier);
             assertThat(configs, hasSize(1));
@@ -229,6 +230,7 @@ public void testRerank() throws IOException {
             assertRerank(oldClusterId);

             // New endpoint
+            cohereRerankServer.enqueue(new MockResponse().setResponseCode(200).setBody(rerankResponse()));
             put(upgradedClusterId, rerankConfig(getUrl(cohereRerankServer)), testTaskType);
             configs = (List<Map<String, Object>>) get(upgradedClusterId).get("endpoints");
             assertThat(configs, hasSize(1));
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java
index c84b4314b9d1a..6d77663f49ece 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.inference.ChunkingOptions;
 import org.elasticsearch.inference.ChunkingSettings;
 import org.elasticsearch.inference.EmptySettingsConfiguration;
-import org.elasticsearch.inference.InferenceService;
 import org.elasticsearch.inference.InferenceServiceConfiguration;
 import org.elasticsearch.inference.InferenceServiceResults;
 import org.elasticsearch.inference.InputType;
@@ -51,6 +50,7 @@
 import org.elasticsearch.xpack.inference.services.alibabacloudsearch.sparse.AlibabaCloudSearchSparseModel;
 import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -60,7 +60,6 @@

 import static org.elasticsearch.inference.TaskType.SPARSE_EMBEDDING;
 import static org.elasticsearch.inference.TaskType.TEXT_EMBEDDING;
-import static org.elasticsearch.xpack.core.inference.action.InferenceAction.Request.DEFAULT_TIMEOUT;
 import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException;
 import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg;
 import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap;
@@ -332,68 +331,39 @@ private EmbeddingRequestChunker.EmbeddingType getEmbeddingTypeFromTaskType(TaskT
      */
     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
+    }
+
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
         if (model instanceof AlibabaCloudSearchEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
+            var serviceSettings = embeddingsModel.getServiceSettings();
+
+            var updatedServiceSettings = new AlibabaCloudSearchEmbeddingsServiceSettings(
+                new AlibabaCloudSearchServiceSettings(
+                    serviceSettings.getCommonSettings().modelId(),
+                    serviceSettings.getCommonSettings().getHost(),
+                    serviceSettings.getCommonSettings().getWorkspaceName(),
+                    serviceSettings.getCommonSettings().getHttpSchema(),
+                    serviceSettings.getCommonSettings().rateLimitSettings()
+                ),
+                SimilarityMeasure.DOT_PRODUCT,
+                embeddingSize,
+                serviceSettings.getMaxInputTokens()
             );
+
+            return new AlibabaCloudSearchEmbeddingsModel(embeddingsModel, updatedServiceSettings);
         } else {
-            checkAlibabaCloudSearchServiceConfig(model, this, listener);
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
         }
     }

-    private AlibabaCloudSearchEmbeddingsModel updateModelWithEmbeddingDetails(AlibabaCloudSearchEmbeddingsModel model, int embeddingSize) {
-        AlibabaCloudSearchEmbeddingsServiceSettings serviceSettings = new AlibabaCloudSearchEmbeddingsServiceSettings(
-            new AlibabaCloudSearchServiceSettings(
-                model.getServiceSettings().getCommonSettings().modelId(),
-                model.getServiceSettings().getCommonSettings().getHost(),
-                model.getServiceSettings().getCommonSettings().getWorkspaceName(),
-                model.getServiceSettings().getCommonSettings().getHttpSchema(),
-                model.getServiceSettings().getCommonSettings().rateLimitSettings()
-            ),
-            SimilarityMeasure.DOT_PRODUCT,
-            embeddingSize,
-            model.getServiceSettings().getMaxInputTokens()
-        );
-
-        return new AlibabaCloudSearchEmbeddingsModel(model, serviceSettings);
-    }
-
     @Override
     public TransportVersion getMinimalSupportedVersion() {
         return TransportVersions.ML_INFERENCE_ALIBABACLOUD_SEARCH_ADDED;
     }

-    /**
-     * For other models except of text embedding
-     * check the model's service settings and task settings
-     *
-     * @param model The new model
-     * @param service The inferenceService
-     * @param listener The listener
-     */
-    private void checkAlibabaCloudSearchServiceConfig(Model model, InferenceService service, ActionListener<Model> listener) {
-        String input = ALIBABA_CLOUD_SEARCH_SERVICE_CONFIG_INPUT;
-        String query = model.getTaskType().equals(TaskType.RERANK) ? ALIBABA_CLOUD_SEARCH_SERVICE_CONFIG_QUERY : null;
-
-        service.infer(
-            model,
-            query,
-            List.of(input),
-            false,
-            Map.of(),
-            InputType.INGEST,
-            DEFAULT_TIMEOUT,
-            listener.delegateFailureAndWrap((delegate, r) -> {
-                listener.onResponse(model);
-            })
-        );
-    }
-
-    private static final String ALIBABA_CLOUD_SEARCH_SERVICE_CONFIG_INPUT = "input";
-    private static final String ALIBABA_CLOUD_SEARCH_SERVICE_CONFIG_QUERY = "query";
-
     public static class Configuration {
         public static InferenceServiceConfiguration get() {
             return configuration.getOrCompute();
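The recurring change across all of the service classes in this patch is the same two-step refactor: checkModelConfig becomes a thin shim over a shared, task-type-aware validator, and the embedding-size bookkeeping each service used to do inline moves into a public updateModelWithEmbeddingDetails override. Only buildModelValidator(TaskType) and validate(service, model, listener) are visible in this patch, so the sketch below of what sits behind ModelValidatorBuilder is an assumption, modeled on the code the patch removes:

    import java.util.List;
    import java.util.Map;
    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.inference.InferenceService;
    import org.elasticsearch.inference.InputType;
    import org.elasticsearch.inference.Model;
    import org.elasticsearch.inference.TaskType;
    import org.elasticsearch.xpack.inference.services.ServiceUtils;

    import static org.elasticsearch.xpack.core.inference.action.InferenceAction.Request.DEFAULT_TIMEOUT;

    // Functional shape of the validator; the real interface lives in
    // org.elasticsearch.xpack.inference.services.validation (an assumption).
    interface ModelValidator {
        void validate(InferenceService service, Model model, ActionListener<Model> listener);
    }

    final class ModelValidatorBuilderSketch {
        static ModelValidator buildModelValidator(TaskType taskType) {
            return switch (taskType) {
                // Text embedding endpoints also probe and persist the embedding size.
                case TEXT_EMBEDDING -> ModelValidatorBuilderSketch::validateTextEmbedding;
                // Everything else just proves a test inference call succeeds.
                default -> ModelValidatorBuilderSketch::validateWithTestCall;
            };
        }

        private static void validateTextEmbedding(InferenceService service, Model model, ActionListener<Model> listener) {
            // getEmbeddingSize runs a test inference and reports the vector length;
            // the service then rewrites its settings with the probed size.
            ServiceUtils.getEmbeddingSize(
                model,
                service,
                listener.delegateFailureAndWrap((l, size) -> l.onResponse(service.updateModelWithEmbeddingDetails(model, size)))
            );
        }

        private static void validateWithTestCall(InferenceService service, Model model, ActionListener<Model> listener) {
            // Mirrors the removed checkAlibabaCloudSearchServiceConfig: send a tiny
            // input and report the original model if the call succeeds.
            service.infer(
                model,
                null,
                List.of("input"),
                false,
                Map.of(),
                InputType.INGEST,
                DEFAULT_TIMEOUT,
                listener.delegateFailureAndWrap((l, results) -> l.onResponse(model))
            );
        }
    }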
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java
index f9822c7ab4af9..a69b9d2c70405 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java
@@ -49,6 +49,7 @@
 import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel;
 import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.io.IOException;
 import java.util.EnumSet;
@@ -303,49 +304,34 @@ public Set<TaskType> supportedStreamingTasks() {
      */
     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
-        if (model instanceof AmazonBedrockEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
-            );
-        } else {
-            listener.onResponse(model);
-        }
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
     }

-    private AmazonBedrockEmbeddingsModel updateModelWithEmbeddingDetails(AmazonBedrockEmbeddingsModel model, int embeddingSize) {
-        AmazonBedrockEmbeddingsServiceSettings serviceSettings = model.getServiceSettings();
-        if (serviceSettings.dimensionsSetByUser()
-            && serviceSettings.dimensions() != null
-            && serviceSettings.dimensions() != embeddingSize) {
-            throw new ElasticsearchStatusException(
-                Strings.format(
-                    "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. "
-                        + "Please recreate the [%s] configuration with the correct dimensions",
-                    embeddingSize,
-                    serviceSettings.dimensions(),
-                    model.getConfigurations().getInferenceEntityId()
-                ),
-                RestStatus.BAD_REQUEST
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
+        if (model instanceof AmazonBedrockEmbeddingsModel embeddingsModel) {
+            var serviceSettings = embeddingsModel.getServiceSettings();
+            var similarityFromModel = serviceSettings.similarity();
+            var similarityToUse = similarityFromModel == null
+                ? getProviderDefaultSimilarityMeasure(embeddingsModel.provider())
+                : similarityFromModel;
+
+            var updatedServiceSettings = new AmazonBedrockEmbeddingsServiceSettings(
+                serviceSettings.region(),
+                serviceSettings.modelId(),
+                serviceSettings.provider(),
+                embeddingSize,
+                serviceSettings.dimensionsSetByUser(),
+                serviceSettings.maxInputTokens(),
+                similarityToUse,
+                serviceSettings.rateLimitSettings()
             );
-        }
-
-        var similarityFromModel = serviceSettings.similarity();
-        var similarityToUse = similarityFromModel == null ? getProviderDefaultSimilarityMeasure(model.provider()) : similarityFromModel;
-
-        AmazonBedrockEmbeddingsServiceSettings settingsToUse = new AmazonBedrockEmbeddingsServiceSettings(
-            serviceSettings.region(),
-            serviceSettings.modelId(),
-            serviceSettings.provider(),
-            embeddingSize,
-            serviceSettings.dimensionsSetByUser(),
-            serviceSettings.maxInputTokens(),
-            similarityToUse,
-            serviceSettings.rateLimitSettings()
-        );

-        return new AmazonBedrockEmbeddingsModel(model, settingsToUse);
+            return new AmazonBedrockEmbeddingsModel(embeddingsModel, updatedServiceSettings);
+        } else {
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
+        }
     }

     private static void checkProviderForTask(TaskType taskType, AmazonBedrockProvider provider) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java
index 556b34b945c14..eba7353f2b12e 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java
@@ -39,6 +39,7 @@
 import org.elasticsearch.xpack.inference.services.anthropic.completion.AnthropicChatCompletionModel;
 import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -176,6 +177,12 @@ public AnthropicModel parsePersistedConfig(String inferenceEntityId, TaskType ta
         );
     }

+    @Override
+    public void checkModelConfig(Model model, ActionListener<Model> listener) {
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
+    }
+
     @Override
     public InferenceServiceConfiguration getConfiguration() {
         return Configuration.get();
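Anthropic only serves completion-style task types, so it gains just the checkModelConfig shim and no updateModelWithEmbeddingDetails override. The TODO carried by every shim points at a presumed end state, sketched below as an assumption rather than anything in this patch: callers invoke the shared validator directly and the per-service shims disappear.

    // Presumed end state once the TODO shims are removed (an assumption):
    // endpoint creation calls the validator directly.
    ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(service, model, listener);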
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java
index 6d36e5f6c8fe7..2f3a935cdf010 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.util.LazyInitializable;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
@@ -46,6 +45,7 @@
 import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel;
 import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -294,48 +294,32 @@ protected void doChunkedInfer(
      */
     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
-        if (model instanceof AzureOpenAiEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
-            );
-        } else {
-            listener.onResponse(model);
-        }
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
     }

-    private AzureOpenAiEmbeddingsModel updateModelWithEmbeddingDetails(AzureOpenAiEmbeddingsModel model, int embeddingSize) {
-        if (model.getServiceSettings().dimensionsSetByUser()
-            && model.getServiceSettings().dimensions() != null
-            && model.getServiceSettings().dimensions() != embeddingSize) {
-            throw new ElasticsearchStatusException(
-                Strings.format(
-                    "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. "
-                        + "Please recreate the [%s] configuration with the correct dimensions",
-                    embeddingSize,
-                    model.getServiceSettings().dimensions(),
-                    model.getConfigurations().getInferenceEntityId()
-                ),
-                RestStatus.BAD_REQUEST
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
+        if (model instanceof AzureOpenAiEmbeddingsModel embeddingsModel) {
+            var serviceSettings = embeddingsModel.getServiceSettings();
+            var similarityFromModel = serviceSettings.similarity();
+            var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel;
+
+            var updatedServiceSettings = new AzureOpenAiEmbeddingsServiceSettings(
+                serviceSettings.resourceName(),
+                serviceSettings.deploymentId(),
+                serviceSettings.apiVersion(),
+                embeddingSize,
+                serviceSettings.dimensionsSetByUser(),
+                serviceSettings.maxInputTokens(),
+                similarityToUse,
+                serviceSettings.rateLimitSettings()
             );
-        }
-
-        var similarityFromModel = model.getServiceSettings().similarity();
-        var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel;
-
-        AzureOpenAiEmbeddingsServiceSettings serviceSettings = new AzureOpenAiEmbeddingsServiceSettings(
-            model.getServiceSettings().resourceName(),
-            model.getServiceSettings().deploymentId(),
-            model.getServiceSettings().apiVersion(),
-            embeddingSize,
-            model.getServiceSettings().dimensionsSetByUser(),
-            model.getServiceSettings().maxInputTokens(),
-            similarityToUse,
-            model.getServiceSettings().rateLimitSettings()
-        );

-        return new AzureOpenAiEmbeddingsModel(model, serviceSettings);
+            return new AzureOpenAiEmbeddingsModel(embeddingsModel, updatedServiceSettings);
+        } else {
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
+        }
     }

     @Override
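Note that the identical dimensions-mismatch guard is deleted from each service in this patch; a check like it presumably lives once in the shared text-embedding validation now. A sketch of that guard, with the message text taken verbatim from the removed code and its new home assumed rather than shown by this patch:

    // Guard each service used to carry inline; placement in shared validation
    // is an assumption, the message text comes from the removed code.
    static void throwIfDimensionsMismatch(Model model, int embeddingSize, Integer configuredDimensions, boolean setByUser) {
        if (setByUser && configuredDimensions != null && configuredDimensions != embeddingSize) {
            throw new ElasticsearchStatusException(
                Strings.format(
                    "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. "
                        + "Please recreate the [%s] configuration with the correct dimensions",
                    embeddingSize,
                    configuredDimensions,
                    model.getConfigurations().getInferenceEntityId()
                ),
                RestStatus.BAD_REQUEST
            );
        }
    }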
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
index de1d055e160da..cc67470686a02 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
@@ -45,6 +45,7 @@
 import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel;
 import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -293,36 +294,35 @@
      */
     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
+    }
+
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
         if (model instanceof CohereEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
+            var serviceSettings = embeddingsModel.getServiceSettings();
+            var similarityFromModel = serviceSettings.similarity();
+            var similarityToUse = similarityFromModel == null ? defaultSimilarity() : similarityFromModel;
+
+            var updatedServiceSettings = new CohereEmbeddingsServiceSettings(
+                new CohereServiceSettings(
+                    serviceSettings.getCommonSettings().uri(),
+                    similarityToUse,
+                    embeddingSize,
+                    serviceSettings.getCommonSettings().maxInputTokens(),
+                    serviceSettings.getCommonSettings().modelId(),
+                    serviceSettings.getCommonSettings().rateLimitSettings()
+                ),
+                serviceSettings.getEmbeddingType()
             );
+
+            return new CohereEmbeddingsModel(embeddingsModel, updatedServiceSettings);
         } else {
-            listener.onResponse(model);
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
         }
     }

-    private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsModel model, int embeddingSize) {
-        var userDefinedSimilarity = model.getServiceSettings().similarity();
-        var similarityToUse = userDefinedSimilarity == null ? defaultSimilarity() : userDefinedSimilarity;
-
-        CohereEmbeddingsServiceSettings serviceSettings = new CohereEmbeddingsServiceSettings(
-            new CohereServiceSettings(
-                model.getServiceSettings().getCommonSettings().uri(),
-                similarityToUse,
-                embeddingSize,
-                model.getServiceSettings().getCommonSettings().maxInputTokens(),
-                model.getServiceSettings().getCommonSettings().modelId(),
-                model.getServiceSettings().getCommonSettings().rateLimitSettings()
-            ),
-            model.getServiceSettings().getEmbeddingType()
-        );
-
-        return new CohereEmbeddingsModel(model, serviceSettings);
-    }
-
     /**
      * Return the default similarity measure for the embedding type.
      * Cohere embeddings are normalized to unit vectors therefor Dot
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java
index a05b1a937d376..204593464a4ad 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.util.LazyInitializable;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
@@ -45,6 +44,7 @@
 import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsServiceSettings;
 import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -181,15 +181,8 @@ public TransportVersion getMinimalSupportedVersion() {

     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
-        if (model instanceof GoogleVertexAiEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
-            );
-        } else {
-            listener.onResponse(model);
-        }
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
     }

     @Override
@@ -240,34 +233,26 @@ protected void doChunkedInfer(
         }
     }

-    private GoogleVertexAiEmbeddingsModel updateModelWithEmbeddingDetails(GoogleVertexAiEmbeddingsModel model, int embeddingSize) {
-        if (model.getServiceSettings().dimensionsSetByUser()
-            && model.getServiceSettings().dimensions() != null
-            && model.getServiceSettings().dimensions() != embeddingSize) {
-            throw new ElasticsearchStatusException(
-                Strings.format(
-                    "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. "
-                        + "Please recreate the [%s] configuration with the correct dimensions",
-                    embeddingSize,
-                    model.getServiceSettings().dimensions(),
-                    model.getConfigurations().getInferenceEntityId()
-                ),
-                RestStatus.BAD_REQUEST
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
+        if (model instanceof GoogleVertexAiEmbeddingsModel embeddingsModel) {
+            var serviceSettings = embeddingsModel.getServiceSettings();
+
+            var updatedServiceSettings = new GoogleVertexAiEmbeddingsServiceSettings(
+                serviceSettings.location(),
+                serviceSettings.projectId(),
+                serviceSettings.modelId(),
+                serviceSettings.dimensionsSetByUser(),
+                serviceSettings.maxInputTokens(),
+                embeddingSize,
+                serviceSettings.similarity(),
+                serviceSettings.rateLimitSettings()
             );
-        }
-
-        GoogleVertexAiEmbeddingsServiceSettings serviceSettings = new GoogleVertexAiEmbeddingsServiceSettings(
-            model.getServiceSettings().location(),
-            model.getServiceSettings().projectId(),
-            model.getServiceSettings().modelId(),
-            model.getServiceSettings().dimensionsSetByUser(),
-            model.getServiceSettings().maxInputTokens(),
-            embeddingSize,
-            model.getServiceSettings().similarity(),
-            model.getServiceSettings().rateLimitSettings()
-        );

-        return new GoogleVertexAiEmbeddingsModel(model, serviceSettings);
+            return new GoogleVertexAiEmbeddingsModel(embeddingsModel, updatedServiceSettings);
+        } else {
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
+        }
     }

     private static GoogleVertexAiModel createModelFromPersistent(
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java
index f4f4605c667c3..592900d117b39 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.xpack.inference.services.ServiceUtils;
 import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel;
 import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsServiceSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;

 import java.util.EnumSet;
 import java.util.HashMap;
@@ -228,35 +229,34 @@ public TransportVersion getMinimalSupportedVersion() {

     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
+    }
+
+    @Override
+    public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
         if (model instanceof IbmWatsonxEmbeddingsModel embeddingsModel) {
-            ServiceUtils.getEmbeddingSize(
-                model,
-                this,
-                listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)))
+            var serviceSettings = embeddingsModel.getServiceSettings();
+            var similarityFromModel = serviceSettings.similarity();
+            var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel;
+
+            var updatedServiceSettings = new IbmWatsonxEmbeddingsServiceSettings(
+                serviceSettings.modelId(),
+                serviceSettings.projectId(),
+                serviceSettings.url(),
+                serviceSettings.apiVersion(),
+                serviceSettings.maxInputTokens(),
+                embeddingSize,
+                similarityToUse,
+                serviceSettings.rateLimitSettings()
             );
+
+            return new IbmWatsonxEmbeddingsModel(embeddingsModel, updatedServiceSettings);
         } else {
-            listener.onResponse(model);
+            throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
         }
     }

-    private IbmWatsonxEmbeddingsModel updateModelWithEmbeddingDetails(IbmWatsonxEmbeddingsModel model, int embeddingSize) {
-        var similarityFromModel = model.getServiceSettings().similarity();
-        var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel;
-
-        IbmWatsonxEmbeddingsServiceSettings serviceSettings = new IbmWatsonxEmbeddingsServiceSettings(
-            model.getServiceSettings().modelId(),
-            model.getServiceSettings().projectId(),
-            model.getServiceSettings().url(),
-            model.getServiceSettings().apiVersion(),
-            model.getServiceSettings().maxInputTokens(),
-            embeddingSize,
-            similarityToUse,
-            model.getServiceSettings().rateLimitSettings()
-        );
-
-        return new IbmWatsonxEmbeddingsModel(model, serviceSettings);
-    }
-
     @Override
     protected void doInfer(
         Model model,
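The remainder of the patch is the test half: each *ServiceTests class below gains the same pair of cases, one asserting that updateModelWithEmbeddingDetails rejects non-embedding models, one asserting that embedding size and similarity propagate into the updated settings. The shared shape, with createService() and createCompletionModel() as stand-ins for the per-service test helpers (they are not real methods):

    // Common shape of the new tests; the two factory calls are hypothetical
    // placeholders for each service's own helpers.
    public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
        try (var service = createService()) {
            var nonEmbeddingModel = createCompletionModel(); // any non-embeddings model
            assertThrows(
                ElasticsearchStatusException.class,
                () -> service.updateModelWithEmbeddingDetails(nonEmbeddingModel, randomNonNegativeInt())
            );
        }
    }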
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java
index aac111c22558e..b6d29ccab9a49 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java
@@ -7,6 +7,7 @@

 package org.elasticsearch.xpack.inference.services.alibabacloudsearch;

+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -22,6 +23,7 @@
 import org.elasticsearch.inference.InputType;
 import org.elasticsearch.inference.Model;
 import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.SimilarityMeasure;
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -50,6 +52,7 @@
 import org.elasticsearch.xpack.inference.services.alibabacloudsearch.embeddings.AlibabaCloudSearchEmbeddingsServiceSettingsTests;
 import org.elasticsearch.xpack.inference.services.alibabacloudsearch.embeddings.AlibabaCloudSearchEmbeddingsTaskSettingsTests;
 import org.elasticsearch.xpack.inference.services.alibabacloudsearch.sparse.AlibabaCloudSearchSparseModel;
+import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests;
 import org.hamcrest.MatcherAssert;
 import org.junit.After;
 import org.junit.Before;
@@ -325,6 +328,43 @@ public void doInfer(
         }
     }

+    public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
+        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
+
+        try (var service = new AlibabaCloudSearchService(senderFactory, createWithEmptySettings(threadPool))) {
+            var model = OpenAiChatCompletionModelTests.createChatCompletionModel(
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10)
+            );
+            assertThrows(
+                ElasticsearchStatusException.class,
+                () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); }
+            );
+        }
+    }
+
+    public void testUpdateModelWithEmbeddingDetails_UpdatesEmbeddingSizeAndSimilarity() throws IOException {
+        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
+        try (var service = new AlibabaCloudSearchService(senderFactory, createWithEmptySettings(threadPool))) {
+            var embeddingSize = randomNonNegativeInt();
+            var model = AlibabaCloudSearchEmbeddingsModelTests.createModel(
+                randomAlphaOfLength(10),
+                randomFrom(TaskType.values()),
+                AlibabaCloudSearchEmbeddingsServiceSettingsTests.createRandom(),
+                AlibabaCloudSearchEmbeddingsTaskSettingsTests.createRandom(),
+                null
+            );
+
+            Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
+
+            assertEquals(SimilarityMeasure.DOT_PRODUCT, updatedModel.getServiceSettings().similarity());
+            assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
+        }
+    }
+
     public void testChunkedInfer_TextEmbeddingChunkingSettingsSet() throws IOException {
         testChunkedInfer(TaskType.TEXT_EMBEDDING, ChunkingSettingsTests.createRandomChunkingSettings());
     }
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java
index e76fb10c96131..e583e50075ee7 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java
@@ -50,6 +50,7 @@
 import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel;
 import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests;
 import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings;
+import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests;
 import org.hamcrest.CoreMatchers;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
@@ -72,6 +73,7 @@
 import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion;
 import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat;
 import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings;
+import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProviderCapabilities.getProviderDefaultSimilarityMeasure;
 import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettingsTests.getAmazonBedrockSecretSettingsMap;
 import static org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionServiceSettingsTests.createChatCompletionRequestSettingsMap;
 import static org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap;
@@ -1375,6 +1377,78 @@ public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensio
         }
     }

+    public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
+        var sender = mock(Sender.class);
+        var factory = mock(HttpRequestSender.Factory.class);
+        when(factory.createSender()).thenReturn(sender);
+
+        var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
+            ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
+            mockClusterServiceEmpty()
+        );
+
+        try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) {
+            var model = AmazonBedrockChatCompletionModelTests.createModel(
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomFrom(AmazonBedrockProvider.values()),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10)
+            );
+            assertThrows(
+                ElasticsearchStatusException.class,
+                () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); }
+            );
+        }
+    }
+
+    public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException {
+        testUpdateModelWithEmbeddingDetails_Successful(null);
+    }
+
+    public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException {
+        testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values()));
+    }
+
+    private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException {
+        var sender = mock(Sender.class);
+        var factory = mock(HttpRequestSender.Factory.class);
+        when(factory.createSender()).thenReturn(sender);
+
+        var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory(
+            ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY),
+            mockClusterServiceEmpty()
+        );
+
+        try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) {
+            var embeddingSize = randomNonNegativeInt();
+            var provider = randomFrom(AmazonBedrockProvider.values());
+            var model = AmazonBedrockEmbeddingsModelTests.createModel(
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10),
+                provider,
+                randomNonNegativeInt(),
+                randomBoolean(),
+                randomNonNegativeInt(),
+                similarityMeasure,
+                RateLimitSettingsTests.createRandom(),
+                createRandomChunkingSettings(),
+                randomAlphaOfLength(10),
+                randomAlphaOfLength(10)
+            );
+
+            Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
+
+            SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null
+                ? getProviderDefaultSimilarityMeasure(provider)
+                : similarityMeasure;
+            assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity());
+            assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
+        }
+    }
+
     public void testInfer_UnauthorizedResponse() throws IOException {
         var sender = mock(Sender.class);
         var factory = mock(HttpRequestSender.Factory.class);
SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public void testInfer_UnauthorisedResponse() throws IOException, URISyntaxException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 725879e76efc1..30f3b344a268c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1074,6 +1074,50 @@ public void testCheckModelConfig_DoesNotUpdateSimilarity_WhenItIsSpecifiedAsCosi } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + var model = CohereCompletionModelTests.createModel(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10)); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = CohereEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + randomNonNegativeInt(), + randomNonNegativeInt(), + randomAlphaOfLength(10), + randomFrom(CohereEmbeddingType.values()), + similarityMeasure + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? 
CohereService.defaultSimilarity() : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public void testInfer_UnauthorisedResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java index 906a825e49561..2aeba5fcbe209 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockWebServer; @@ -30,9 +31,11 @@ import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModelTests; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModelTests; import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -827,6 +830,37 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModelWhenChunkingSetting } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + try (var service = createGoogleVertexAiService()) { + var model = GoogleVertexAiRerankModelTests.createModel(randomAlphaOfLength(10), randomNonNegativeInt()); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + try (var service = createGoogleVertexAiService()) { + var embeddingSize = randomNonNegativeInt(); + var model = GoogleVertexAiEmbeddingsModelTests.createModel(randomAlphaOfLength(10), randomBoolean(), similarityMeasure); + + Model 
updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + // testInfer tested via end-to-end notebook tests in AppEx repo @SuppressWarnings("checkstyle:LineLength") diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModelTests.java index 7836c5c15cfb1..5b016de7493f5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModelTests.java @@ -64,7 +64,7 @@ public void testOverrideWith_DoesNotOverrideAndModelRemainsEqual_WhenSettingsAre } public void testOverrideWith_SetsInputTypeToOverride_WhenFieldIsNullInModelTaskSettings_AndNullInRequestTaskSettings() { - var model = createModel("model", Boolean.FALSE, null); + var model = createModel("model", Boolean.FALSE, (InputType) null); var overriddenModel = GoogleVertexAiEmbeddingsModel.of(model, getTaskSettingsMap(null, null), InputType.SEARCH); var expectedModel = createModel("model", Boolean.FALSE, InputType.SEARCH); @@ -80,7 +80,7 @@ public void testOverrideWith_SetsInputType_FromRequest_IfValid_OverridingStoredT } public void testOverrideWith_SetsInputType_FromRequest_IfValid_OverridingRequestTaskSettings() { - var model = createModel("model", Boolean.FALSE, null); + var model = createModel("model", Boolean.FALSE, (InputType) null); var overriddenModel = GoogleVertexAiEmbeddingsModel.of(model, getTaskSettingsMap(null, InputType.CLUSTERING), InputType.SEARCH); var expectedModel = createModel("model", Boolean.FALSE, InputType.SEARCH); @@ -96,10 +96,10 @@ public void testOverrideWith_OverridesInputType_WithRequestTaskSettingsSearch_Wh } public void testOverrideWith_DoesNotSetInputType_FromRequest_IfInputTypeIsInvalid() { - var model = createModel("model", Boolean.FALSE, null); + var model = createModel("model", Boolean.FALSE, (InputType) null); var overriddenModel = GoogleVertexAiEmbeddingsModel.of(model, getTaskSettingsMap(null, null), InputType.UNSPECIFIED); - var expectedModel = createModel("model", Boolean.FALSE, null); + var expectedModel = createModel("model", Boolean.FALSE, (InputType) null); MatcherAssert.assertThat(overriddenModel, is(expectedModel)); } @@ -136,6 +136,31 @@ public static GoogleVertexAiEmbeddingsModel createModel( ); } + public static GoogleVertexAiEmbeddingsModel createModel( + String modelId, + @Nullable Boolean autoTruncate, + SimilarityMeasure similarityMeasure + ) { + return new GoogleVertexAiEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new GoogleVertexAiEmbeddingsServiceSettings( + randomAlphaOfLength(8), + randomAlphaOfLength(8), + modelId, + false, + null, + null, + similarityMeasure, + null + ), + new GoogleVertexAiEmbeddingsTaskSettings(autoTruncate, randomFrom(InputType.INGEST, InputType.SEARCH)), + null, + new GoogleVertexAiSecretSettings(new 
SecureString(randomAlphaOfLength(8).toCharArray())) + ); + } + public static GoogleVertexAiEmbeddingsModel createModel(String modelId, @Nullable Boolean autoTruncate, @Nullable InputType inputType) { return new GoogleVertexAiEmbeddingsModel( "id", diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index f7f37c5bcd15f..1261e3834437b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModel; import org.elasticsearch.xpack.inference.services.ibmwatsonx.embeddings.IbmWatsonxEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.After; @@ -930,6 +931,56 @@ public void testCheckModelConfig_DoesNotUpdateSimilarity_WhenItIsSpecifiedAsCosi } } + public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new IbmWatsonxServiceWithoutAuth(senderFactory, createWithEmptySettings(threadPool))) { + var model = OpenAiChatCompletionModelTests.createChatCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + assertThrows( + ElasticsearchStatusException.class, + () -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); } + ); + } + } + + public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(null); + } + + public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException { + testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values())); + } + + private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new IbmWatsonxServiceWithoutAuth(senderFactory, createWithEmptySettings(threadPool))) { + var embeddingSize = randomNonNegativeInt(); + var model = IbmWatsonxEmbeddingsModelTests.createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + URI.create(randomAlphaOfLength(10)), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomNonNegativeInt(), + similarityMeasure + ); + + Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); + + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? 
SimilarityMeasure.DOT_PRODUCT : similarityMeasure; + assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); + assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); + } + } + public void testGetConfiguration() throws Exception { try (var service = createIbmWatsonxService()) { String content = XContentHelper.stripWhitespace(""" From 2f0b095b5d7ce9adcd6c37682e39c277ad5cb512 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 25 Nov 2024 16:45:36 +0100 Subject: [PATCH 226/386] Stop using _source.mode attribute in traces-otel builtin template (#117487) The traces-otel@mappings component template is configured to use logsdb. No need to configure source mode separately. --- .../resources/component-templates/traces-otel@mappings.yaml | 2 -- x-pack/plugin/otel-data/src/main/resources/resources.yaml | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml index 2b0d1ec536fa6..3a1ba435b8f1f 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/traces-otel@mappings.yaml @@ -10,8 +10,6 @@ template: sort: field: [ "resource.attributes.host.name", "@timestamp" ] mappings: - _source: - mode: synthetic properties: trace_id: type: keyword diff --git a/x-pack/plugin/otel-data/src/main/resources/resources.yaml b/x-pack/plugin/otel-data/src/main/resources/resources.yaml index b2d30c7f85cc4..9edbe5622b3f1 100644 --- a/x-pack/plugin/otel-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin otel-data. This must be increased whenever an existing template is # changed, in order for it to be updated on Elasticsearch upgrade. 
-version: 6 +version: 7 component-templates: - otel@mappings From b7d801809fce6669aaa530f7375bfdc6a6bcb24e Mon Sep 17 00:00:00 2001 From: padmaprasath21 <168728638+padmaprasath21@users.noreply.github.com> Date: Mon, 25 Nov 2024 21:26:17 +0530 Subject: [PATCH 227/386] Update tsds-reindex.asciidoc (#117446) --- docs/reference/data-streams/tsds-reindex.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/data-streams/tsds-reindex.asciidoc b/docs/reference/data-streams/tsds-reindex.asciidoc index 9d6594db4e779..f4d00f33c179c 100644 --- a/docs/reference/data-streams/tsds-reindex.asciidoc +++ b/docs/reference/data-streams/tsds-reindex.asciidoc @@ -202,7 +202,7 @@ POST /_component_template/destination_template POST /_index_template/2 { "index_patterns": [ - "k8s*" + "k9s*" ], "composed_of": [ "destination_template" From 8c22fc479f7f62e1ac6ce2e6db30c1c723ab3c8f Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 25 Nov 2024 17:04:48 +0100 Subject: [PATCH 228/386] Make spatial search functions not preview (#117489) --- .../esql/functions/spatial-functions.asciidoc | 16 ++++++++-------- docs/reference/geospatial-analysis.asciidoc | 10 +++++----- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index 79acc2028d983..eee44d337b4c6 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -8,19 +8,19 @@ {esql} supports these spatial functions: // tag::spatial_list[] -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> // end::spatial_list[] +include::layout/st_distance.asciidoc[] include::layout/st_intersects.asciidoc[] include::layout/st_disjoint.asciidoc[] include::layout/st_contains.asciidoc[] include::layout/st_within.asciidoc[] include::layout/st_x.asciidoc[] include::layout/st_y.asciidoc[] -include::layout/st_distance.asciidoc[] diff --git a/docs/reference/geospatial-analysis.asciidoc b/docs/reference/geospatial-analysis.asciidoc index 6760040e14bc7..678e0ee17aec2 100644 --- a/docs/reference/geospatial-analysis.asciidoc +++ b/docs/reference/geospatial-analysis.asciidoc @@ -38,11 +38,11 @@ Data is often messy and incomplete. <> lets you clean, <> has support for <> functions, enabling efficient index searching for documents that intersect with, are within, are contained by, or are disjoint from a query geometry. In addition, the `ST_DISTANCE` function calculates the distance between two points. 
-* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> -* experimental:[] <> +* <> +* <> +* <> +* <> +* <> [discrete] [[geospatial-aggregate]] From 4e3301530d16ff937fa835b42a108aee48203af5 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 25 Nov 2024 16:20:18 +0000 Subject: [PATCH 229/386] [ML] Explicitly set chunking settings in preconfigured endpoints (#117327) --- .../xpack/inference/DefaultEndPointsIT.java | 20 +++++++++++++++++++ .../ElasticsearchInternalService.java | 5 +++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 69767ce0b24f0..ba3e48e11928d 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -51,6 +51,14 @@ public void tearDown() throws Exception { super.tearDown(); } + public void testGet() throws IOException { + var elserModel = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); + assertDefaultElserConfig(elserModel); + + var e5Model = getModel(ElasticsearchInternalService.DEFAULT_E5_ID); + assertDefaultE5Config(e5Model); + } + @SuppressWarnings("unchecked") public void testInferDeploysDefaultElser() throws IOException { var model = getModel(ElasticsearchInternalService.DEFAULT_ELSER_ID); @@ -79,6 +87,7 @@ private static void assertDefaultElserConfig(Map modelConfig) { adaptiveAllocations, Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); + assertDefaultChunkingSettings(modelConfig); } @SuppressWarnings("unchecked") @@ -113,6 +122,17 @@ private static void assertDefaultE5Config(Map modelConfig) { adaptiveAllocations, Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); + assertDefaultChunkingSettings(modelConfig); + } + + @SuppressWarnings("unchecked") + private static void assertDefaultChunkingSettings(Map modelConfig) { + var chunkingSettings = (Map) modelConfig.get("chunking_settings"); + assertThat( + modelConfig.toString(), + chunkingSettings, + Matchers.is(Map.of("strategy", "sentence", "max_chunk_size", 250, "sentence_overlap", 1)) + ); } public void testMultipleInferencesTriggeringDownloadAndDeploy() throws InterruptedException { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 718aeae979fe9..6d124906d65bd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -862,6 +862,7 @@ public void updateModelsWithDynamicFields(List models, ActionListener> defaultsListener) { preferredModelVariantFn.accept(defaultsListener.delegateFailureAndWrap((delegate, preferredModelVariant) -> { if (PreferredModelVariant.LINUX_X86_OPTIMIZED.equals(preferredModelVariant)) { @@ -892,7 
+893,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) {
                 new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)
             ),
             ElserMlNodeTaskSettings.DEFAULT,
-            null // default chunking settings
+            ChunkingSettingsBuilder.DEFAULT_SETTINGS
         );
         var defaultE5 = new MultilingualE5SmallModel(
             DEFAULT_E5_ID,
@@ -904,7 +905,7 @@ private List defaultConfigs(boolean useLinuxOptimizedModel) {
                 useLinuxOptimizedModel ? MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 : MULTILINGUAL_E5_SMALL_MODEL_ID,
                 new AdaptiveAllocationsSettings(Boolean.TRUE, 0, 32)
             ),
-            null // default chunking settings
+            ChunkingSettingsBuilder.DEFAULT_SETTINGS
         );
         return List.of(defaultElser, defaultE5);
     }

From 374c88a832edb53525a1c52873db185c2acdfb23 Mon Sep 17 00:00:00 2001
From: Benjamin Trent 
Date: Mon, 25 Nov 2024 11:38:06 -0500
Subject: [PATCH 230/386] Correct bit * byte and bit * float script comparisons (#117404)

I goofed on the bit * byte and bit * float comparisons. Naturally, these
should be big-endian and compare the dimensions with the binary ones
appropriately.

Additionally, I added a test to ensure that this is handled correctly.
---
 docs/changelog/117404.yaml                    |  5 +++
 .../vectors/vector-functions.asciidoc         |  4 ++
 .../elasticsearch/simdvec/ESVectorUtil.java   | 12 ++++--
 .../simdvec/ESVectorUtilTests.java            | 16 +++++++
 .../141_multi_dense_vector_max_sim.yml        |  6 +--
 .../painless/146_dense_vector_bit_basic.yml   | 42 +++++++++----------
 .../action/search/SearchCapabilities.java     |  4 +-
 .../MultiVectorScoreScriptUtilsTests.java     |  2 +-
 .../script/VectorScoreScriptUtilsTests.java   |  2 +-
 9 files changed, 61 insertions(+), 32 deletions(-)
 create mode 100644 docs/changelog/117404.yaml

diff --git a/docs/changelog/117404.yaml b/docs/changelog/117404.yaml
new file mode 100644
index 0000000000000..0bab171956ca9
--- /dev/null
+++ b/docs/changelog/117404.yaml
@@ -0,0 +1,5 @@
+pr: 117404
+summary: Correct bit * byte and bit * float script comparisons
+area: Vector Search
+type: bug
+issues: []
diff --git a/docs/reference/vectors/vector-functions.asciidoc b/docs/reference/vectors/vector-functions.asciidoc
index 10dca8084e28a..23419e8eb12b1 100644
--- a/docs/reference/vectors/vector-functions.asciidoc
+++ b/docs/reference/vectors/vector-functions.asciidoc
@@ -336,6 +336,10 @@ When using `bit` vectors, not all the vector functions are available. The suppor
 this is the sum of the bitwise AND of the two vectors. If providing `float[]` or `byte[]`, who has `dims` number of elements, as a query vector, the `dotProduct` is
 the sum of the floating point values using the stored `bit` vector as a mask.

+NOTE: When comparing `floats` and `bytes` with `bit` vectors, the `bit` vector is treated as a mask in big-endian order.
+For example, if the `bit` vector is `10100001` (e.g. the single byte value `161`) and it's compared
+with the array of values `[1, 2, 3, 4, 5, 6, 7, 8]` the `dotProduct` will be `1 + 3 + 8 = 12`.
+
 Here is an example of using dot-product with bit vectors.

 [source,console]
diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
index de2cb9042610b..2f4743a47a14a 100644
--- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
+++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java
@@ -51,6 +51,8 @@ public static long ipByteBinByte(byte[] q, byte[] d) {

     /**
      * Compute the inner product of two vectors, where the query vector is a byte vector and the document vector is a bit vector.
     * This will return the sum of the query vector values using the document vector as a mask.
+     * When comparing the bits with the bytes, they are done in "big endian" order. For example, if the byte vector
+     * is [1, 2, 3, 4, 5, 6, 7, 8] and the bit vector is [0b10000000], the inner product will be 1.
      * @param q the query vector
      * @param d the document vector
      * @return the inner product of the two vectors
@@ -63,9 +65,9 @@ public static int ipByteBit(byte[] q, byte[] d) {
         // now combine the two vectors, summing the byte dimensions where the bit in d is `1`
         for (int i = 0; i < d.length; i++) {
             byte mask = d[i];
-            for (int j = 0; j < Byte.SIZE; j++) {
+            for (int j = Byte.SIZE - 1; j >= 0; j--) {
                 if ((mask & (1 << j)) != 0) {
-                    result += q[i * Byte.SIZE + j];
+                    result += q[i * Byte.SIZE + Byte.SIZE - 1 - j];
                 }
             }
         }
@@ -75,6 +77,8 @@ public static int ipByteBit(byte[] q, byte[] d) {

     /**
      * Compute the inner product of two vectors, where the query vector is a float vector and the document vector is a bit vector.
      * This will return the sum of the query vector values using the document vector as a mask.
+     * When comparing the bits with the floats, they are done in "big endian" order. For example, if the float vector
+     * is [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0] and the bit vector is [0b10000000], the inner product will be 1.0.
      * @param q the query vector
      * @param d the document vector
      * @return the inner product of the two vectors
@@ -86,9 +90,9 @@ public static float ipFloatBit(float[] q, byte[] d) {
         float result = 0;
         for (int i = 0; i < d.length; i++) {
             byte mask = d[i];
-            for (int j = 0; j < Byte.SIZE; j++) {
+            for (int j = Byte.SIZE - 1; j >= 0; j--) {
                 if ((mask & (1 << j)) != 0) {
-                    result += q[i * Byte.SIZE + j];
+                    result += q[i * Byte.SIZE + Byte.SIZE - 1 - j];
                 }
             }
         }
diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java
index e9e0fd58f7638..368898b934c87 100644
--- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java
+++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java
@@ -21,6 +21,22 @@ public class ESVectorUtilTests extends BaseVectorizationTests {
     static final ESVectorizationProvider defaultedProvider = BaseVectorizationTests.defaultProvider();
     static final ESVectorizationProvider defOrPanamaProvider = BaseVectorizationTests.maybePanamaProvider();

+    public void testIpByteBit() {
+        byte[] q = new byte[16];
+        byte[] d = new byte[] { (byte) Integer.parseInt("01100010", 2), (byte) Integer.parseInt("10100111", 2) };
+        random().nextBytes(q);
+        int expected = q[1] + q[2] + q[6] + q[8] + q[10] + q[13] + q[14] + q[15];
+        assertEquals(expected, ESVectorUtil.ipByteBit(q, d));
+    }
+
+    public void testIpFloatBit() {
+        float[] q = new float[16];
+        byte[] d = new byte[] { (byte) Integer.parseInt("01100010", 2), (byte) Integer.parseInt("10100111", 2) };
+        for (int i = 0; i < q.length; i++) { q[i] = random().nextFloat(); } // fill the query vector with random values
+        float expected = q[1] + q[2] + q[6] + q[8] + q[10] + q[13] + q[14] + q[15];
+        assertEquals(expected, ESVectorUtil.ipFloatBit(q, d), 1e-6);
+    }
+
     public void testBitAndCount() {
         testBasicBitAndImpl(ESVectorUtil::andBitCountLong);
     }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml
index caa7c59ab4c42..77d4b70cdfcae 100644
---
a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml @@ -3,7 +3,7 @@ setup: capabilities: - method: POST path: /_search - capabilities: [ multi_dense_vector_script_max_sim ] + capabilities: [ multi_dense_vector_script_max_sim_with_bugfix ] test_runner_features: capabilities reason: "Support for multi dense vector max-sim functions capability required" - skip: @@ -136,10 +136,10 @@ setup: - match: {hits.total: 2} - match: {hits.hits.0._id: "1"} - - close_to: {hits.hits.0._score: {value: 190, error: 0.01}} + - close_to: {hits.hits.0._score: {value: 220, error: 0.01}} - match: {hits.hits.1._id: "3"} - - close_to: {hits.hits.1._score: {value: 125, error: 0.01}} + - close_to: {hits.hits.1._score: {value: 147, error: 0.01}} --- "Test max-sim inv hamming scoring": - skip: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml index 2ee38f849e9d4..cdd65ca0eb296 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml @@ -108,7 +108,7 @@ setup: capabilities: - method: POST path: /_search - capabilities: [ byte_float_bit_dot_product ] + capabilities: [ byte_float_bit_dot_product_with_bugfix ] reason: Capability required to run test - do: catch: bad_request @@ -399,7 +399,7 @@ setup: capabilities: - method: POST path: /_search - capabilities: [ byte_float_bit_dot_product ] + capabilities: [ byte_float_bit_dot_product_with_bugfix ] test_runner_features: [capabilities, close_to] reason: Capability required to run test - do: @@ -419,13 +419,13 @@ setup: - match: { hits.total: 3 } - match: {hits.hits.0._id: "2"} - - close_to: {hits.hits.0._score: {value: 35.999, error: 0.01}} + - close_to: {hits.hits.0._score: {value: 33.78, error: 0.01}} - match: {hits.hits.1._id: "3"} - - close_to: {hits.hits.1._score:{value: 27.23, error: 0.01}} + - close_to: {hits.hits.1._score:{value: 22.579, error: 0.01}} - match: {hits.hits.2._id: "1"} - - close_to: {hits.hits.2._score: {value: 16.57, error: 0.01}} + - close_to: {hits.hits.2._score: {value: 11.919, error: 0.01}} - do: headers: @@ -444,20 +444,20 @@ setup: - match: { hits.total: 3 } - match: {hits.hits.0._id: "2"} - - close_to: {hits.hits.0._score: {value: 35.999, error: 0.01}} + - close_to: {hits.hits.0._score: {value: 33.78, error: 0.01}} - match: {hits.hits.1._id: "3"} - - close_to: {hits.hits.1._score:{value: 27.23, error: 0.01}} + - close_to: {hits.hits.1._score:{value: 22.579, error: 0.01}} - match: {hits.hits.2._id: "1"} - - close_to: {hits.hits.2._score: {value: 16.57, error: 0.01}} + - close_to: {hits.hits.2._score: {value: 11.919, error: 0.01}} --- "Dot product with byte": - requires: capabilities: - method: POST path: /_search - capabilities: [ byte_float_bit_dot_product ] + capabilities: [ byte_float_bit_dot_product_with_bugfix ] test_runner_features: capabilities reason: Capability required to run test - do: @@ -476,14 +476,14 @@ setup: - match: { hits.total: 3 } - - match: {hits.hits.0._id: "1"} - - match: {hits.hits.0._score: 248} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0._score: 415} - - match: 
{hits.hits.1._id: "2"} - - match: {hits.hits.1._score: 136} + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1._score: 168} - - match: {hits.hits.2._id: "3"} - - match: {hits.hits.2._score: 20} + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2._score: 126} - do: headers: @@ -501,11 +501,11 @@ setup: - match: { hits.total: 3 } - - match: {hits.hits.0._id: "1"} - - match: {hits.hits.0._score: 248} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0._score: 415} - - match: {hits.hits.1._id: "2"} - - match: {hits.hits.1._score: 136} + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1._score: 168} - - match: {hits.hits.2._id: "3"} - - match: {hits.hits.2._score: 20} + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2._score: 126} diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index e5c4826bfce97..794b30aa5aab2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -27,7 +27,7 @@ private SearchCapabilities() {} /** Support synthetic source with `bit` type in `dense_vector` field when `index` is set to `false`. */ private static final String BIT_DENSE_VECTOR_SYNTHETIC_SOURCE_CAPABILITY = "bit_dense_vector_synthetic_source"; /** Support Byte and Float with Bit dot product. */ - private static final String BYTE_FLOAT_BIT_DOT_PRODUCT_CAPABILITY = "byte_float_bit_dot_product"; + private static final String BYTE_FLOAT_BIT_DOT_PRODUCT_CAPABILITY = "byte_float_bit_dot_product_with_bugfix"; /** Support docvalue_fields parameter for `dense_vector` field. */ private static final String DENSE_VECTOR_DOCVALUE_FIELDS = "dense_vector_docvalue_fields"; /** Support transforming rank rrf queries to the corresponding rrf retriever. */ @@ -41,7 +41,7 @@ private SearchCapabilities() {} /** Support multi-dense-vector script field access. */ private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access"; /** Initial support for multi-dense-vector maxSim functions access. 
*/ - private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim"; + private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim_with_bugfix"; private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; diff --git a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java index c4a1699181efc..f908f51170478 100644 --- a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java @@ -200,7 +200,7 @@ public void testBitMultiVectorClassBindingsDotProduct() throws IOException { function = new MaxSimDotProduct(scoreScript, floatQueryVector, fieldName); assertEquals( "maxSimDotProduct result is not equal to the expected value!", - 0.42f + 0f + 1f - 1f - 0.42f, + -1.4f + 0.42f + 0f + 1f - 1f, function.maxSimDotProduct(), 0.001 ); diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java index 6b2178310d17c..dcaa64ede9e89 100644 --- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java @@ -267,7 +267,7 @@ public void testBitVectorClassBindingsDotProduct() throws IOException { function = new DotProduct(scoreScript, floatQueryVector, fieldName); assertEquals( "dotProduct result is not equal to the expected value!", - 0.42f + 0f + 1f - 1f - 0.42f, + -1.4f + 0.42f + 0f + 1f - 1f, function.dotProduct(), 0.001 ); From 4e1807f0d91c17750c43b7bb43bc93198c61ba7f Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 03:50:16 +1100 Subject: [PATCH 231/386] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 55b297d (#116255) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Update | Change | |---|---|---| | docker.elastic.co/wolfi/chainguard-base | digest | `9734313` -> `55b297d` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 1pm on tuesday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Renovate Bot](https://togithub.com/renovatebot/renovate). 
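[Editor's note, not part of any patch in this series: the big-endian masking that PATCH 230/386
(#117404) above introduces in `ESVectorUtil` can be illustrated with a minimal, self-contained
sketch. The class name and `main` driver below are invented for illustration only; just the inner
loop mirrors the patched implementation.]

public class BitMaskDotProductSketch {
    // Sum the float dimensions whose bit is set in the mask, reading each mask byte MSB-first,
    // i.e. the most significant bit of byte i selects dimension i * 8.
    static float ipFloatBit(float[] q, byte[] d) {
        float result = 0;
        for (int i = 0; i < d.length; i++) {
            byte mask = d[i];
            for (int j = Byte.SIZE - 1; j >= 0; j--) {
                if ((mask & (1 << j)) != 0) {
                    result += q[i * Byte.SIZE + Byte.SIZE - 1 - j];
                }
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // The documentation example: mask 10100001 (decimal 161) selects the 1st, 3rd and 8th elements.
        float[] q = { 1, 2, 3, 4, 5, 6, 7, 8 };
        byte[] d = { (byte) 0b10100001 };
        System.out.println(ipFloatBit(q, d)); // prints 12.0, i.e. 1 + 3 + 8
    }
}

[Running this prints 12.0, matching the `1 + 3 + 8 = 12` example in the docs change above.]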
--- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index bf901fef90450..71e968557cefe 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:973431347ad45f40e01afbbd010bf9de929c088a63382239b90dd84f39618bc8", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:55b297da5151d2a2997e8ab9729fe1304e4869389d7090ab7031cc29530f69f8", "-wolfi", "apk" ), From 631345f96531a66b9e4130c92df15f08c86e1a79 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Nov 2024 10:20:58 -0800 Subject: [PATCH 232/386] Adjust index version for deprecating source mode (#117183) There was a bug in the version-checking logic for emitting deprecation warnings for source.mode in mappings. --- .../DataStreamTimestampFieldMapperTests.java | 3 +- .../elasticsearch/upgrades/IndexingIT.java | 9 ++++- .../index/mapper/SourceFieldMapper.java | 13 +++++-- .../mapper/DocumentParserContextTests.java | 1 + .../index/mapper/SourceFieldMapperTests.java | 28 ++++++++++++--- .../index/shard/ShardGetServiceTests.java | 2 ++ .../index/mapper/MetadataMapperTestCase.java | 9 ++--- .../test/rest/ESRestTestCase.java | 34 +++++++++++++++---- .../xpack/deprecation/DeprecationChecks.java | 3 +- .../deprecation/IndexDeprecationChecks.java | 26 ++++++++++++++ .../compute/operator/AsyncOperator.java | 1 + .../logsdb/LogsIndexModeCustomSettingsIT.java | 15 ++++++-- .../xpack/logsdb/LogsIndexModeRestTestIT.java | 6 ++++ ...heticSourceIndexSettingsProviderTests.java | 7 +++- 14 files changed, 133 insertions(+), 24 deletions(-) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java index a3995d7462b32..e009db7209eab 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java @@ -48,7 +48,8 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerConflictCheck( "enabled", timestampMapping(true, b -> b.startObject("@timestamp").field("type", "date").endObject()), - timestampMapping(false, b -> b.startObject("@timestamp").field("type", "date").endObject()) + timestampMapping(false, b -> b.startObject("@timestamp").field("type", "date").endObject()), + dm -> {} ); checker.registerUpdateCheck( timestampMapping(false, b -> b.startObject("@timestamp").field("type", "date").endObject()), diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 090f409fd46d0..86a0151e33119 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xcontent.XContentBuilder; @@ -417,9 +418,15 @@ public void testSyntheticSource() throws IOException { if (isOldCluster()) { Request createIndex = new Request("PUT", "/synthetic"); XContentBuilder indexSpec = XContentBuilder.builder(XContentType.JSON.xContent()).startObject(); + boolean useIndexSetting = SourceFieldMapper.onOrAfterDeprecateModeVersion(getOldClusterIndexVersion()); + if (useIndexSetting) { + indexSpec.startObject("settings").field("index.mapping.source.mode", "synthetic").endObject(); + } indexSpec.startObject("mappings"); { - indexSpec.startObject("_source").field("mode", "synthetic").endObject(); + if (useIndexSetting == false) { + indexSpec.startObject("_source").field("mode", "synthetic").endObject(); + } indexSpec.startObject("properties").startObject("kwd").field("type", "keyword").endObject().endObject(); } indexSpec.endObject(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index e5b12f748543f..9d0dc9635537b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -26,6 +26,7 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -297,7 +298,7 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { + if (onOrAfterDeprecateModeVersion(c.indexVersionCreated())) { return resolveStaticInstance(settingSourceMode); } else { return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); @@ -307,14 +308,14 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { c.getIndexSettings().getMode(), c.getSettings(), c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), - c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) + onOrAfterDeprecateModeVersion(c.indexVersionCreated()) == false ) ) { @Override public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) throws MapperParsingException { assert name.equals(SourceFieldMapper.NAME) : name; - if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { + if (onOrAfterDeprecateModeVersion(parserContext.indexVersionCreated()) && node.containsKey("mode")) { deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); } return super.parse(name, node, parserContext); @@ 
-481,4 +482,10 @@ public boolean isDisabled() { public boolean isStored() { return mode == null || mode == Mode.STORED; } + + public static boolean onOrAfterDeprecateModeVersion(IndexVersion version) { + return version.onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER); + // Adjust versions after backporting. + // || version.between(IndexVersions.BACKPORT_DEPRECATE_SOURCE_MODE_MAPPER, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index be36ab9d6eac1..a4108caaf4fc3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -133,5 +133,6 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { assertEquals(ObjectMapper.Defaults.DYNAMIC, resultFromParserContext.getDynamic()); assertEquals(MapperService.MergeReason.MAPPING_UPDATE, resultFromParserContext.getMergeReason()); assertFalse(resultFromParserContext.isInNestedContext()); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index d7f33b9cdb3ba..fa173bc64518e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -52,7 +52,8 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerConflictCheck( "enabled", topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("enabled", false).endObject()), - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("enabled", true).endObject()) + topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("enabled", true).endObject()), + dm -> {} ); checker.registerUpdateCheck( topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("enabled", true).endObject()), @@ -62,14 +63,18 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerUpdateCheck( topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - dm -> assertTrue(dm.metadataMapper(SourceFieldMapper.class).isSynthetic()) + dm -> { + assertTrue(dm.metadataMapper(SourceFieldMapper.class).isSynthetic()); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); + } ); checker.registerConflictCheck("includes", b -> b.array("includes", "foo*")); checker.registerConflictCheck("excludes", b -> b.array("excludes", "foo*")); checker.registerConflictCheck( "mode", topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()) + topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), + dm -> assertWarnings(SourceFieldMapper.DEPRECATION_WARNING) ); } @@ -206,13 +211,14 @@ public void testSyntheticDisabledNotSupported() { ) ); assertThat(e.getMessage(), containsString("Cannot set both [mode] and [enabled] parameters")); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } public void testSyntheticUpdates() throws Exception { 
MapperService mapperService = createMapperService(""" { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); - + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); assertTrue(mapper.isSynthetic()); @@ -220,6 +226,7 @@ public void testSyntheticUpdates() throws Exception { merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); assertTrue(mapper.isSynthetic()); @@ -230,11 +237,15 @@ public void testSyntheticUpdates() throws Exception { Exception e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "stored" } } } """)); + assertThat(e.getMessage(), containsString("Cannot update parameter [mode] from [synthetic] to [stored]")); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "disabled" } } } """); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); + mapper = mapperService.documentMapper().sourceMapper(); assertFalse(mapper.enabled()); assertFalse(mapper.isSynthetic()); @@ -270,6 +281,7 @@ public void testSupportsNonDefaultParameterValues() throws IOException { topMapping(b -> b.startObject("_source").field("mode", randomBoolean() ? "synthetic" : "stored").endObject()) ).documentMapper().sourceMapper(); assertThat(sourceFieldMapper, notNullValue()); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } Exception e = expectThrows( MapperParsingException.class, @@ -301,6 +313,8 @@ public void testSupportsNonDefaultParameterValues() throws IOException { .documentMapper() .sourceMapper() ); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); + assertThat(e.getMessage(), containsString("Parameter [mode=disabled] is not allowed in source")); e = expectThrows( @@ -409,6 +423,7 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { ParsedDocument doc = docMapper.parse(source(b -> { b.field("field1", "value1"); })); assertNotNull(doc.rootDoc().getField("_recovery_source")); assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder().put(INDICES_RECOVERY_SOURCE_ENABLED_SETTING.getKey(), false).build(); @@ -419,6 +434,7 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> b.field("field1", "value1"))); assertNull(doc.rootDoc().getField("_recovery_source")); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } @@ -613,6 +629,7 @@ public void testRecoverySourceWithLogsCustom() throws IOException { ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); assertNotNull(doc.rootDoc().getField("_recovery_source")); assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder() @@ -623,6 +640,7 @@ public void testRecoverySourceWithLogsCustom() throws IOException { DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> 
b.field("@timestamp", "2012-02-13"))); assertNull(doc.rootDoc().getField("_recovery_source")); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } @@ -691,6 +709,7 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) ); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder() @@ -704,6 +723,7 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { source("123", b -> b.field("@timestamp", "2012-02-13").field("field", randomAlphaOfLength(5)), null) ); assertNull(doc.rootDoc().getField("_recovery_source")); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index a49d895f38f67..307bc26c44ba6 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.XContentType; @@ -114,6 +115,7 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException { "mode": "synthetic" """; runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } public void testGetFromTranslogWithDenseVector() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index e86cb8562537f..449ecc099412f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -38,7 +38,7 @@ protected boolean isSupportedOn(IndexVersion version) { protected abstract void registerParameters(ParameterChecker checker) throws IOException; - private record ConflictCheck(XContentBuilder init, XContentBuilder update) {} + private record ConflictCheck(XContentBuilder init, XContentBuilder update, Consumer check) {} private record UpdateCheck(XContentBuilder init, XContentBuilder update, Consumer check) {} @@ -58,7 +58,7 @@ public void registerConflictCheck(String param, CheckedConsumer {})); } /** @@ -68,8 +68,8 @@ public void registerConflictCheck(String param, CheckedConsumer check) { + conflictChecks.put(param, new ConflictCheck(init, update, check)); } public void registerUpdateCheck(XContentBuilder init, XContentBuilder update, Consumer check) { @@ -95,6 +95,7 @@ public final void testUpdates() throws IOException { e.getMessage(), anyOf(containsString("Cannot update parameter [" + param + "]"), containsString("different [" + param + "]")) ); + checker.conflictChecks.get(param).check.accept(mapperService.documentMapper()); } for (UpdateCheck updateCheck : checker.updateChecks) { MapperService mapperService = createMapperService(updateCheck.init); diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 8ca9c0709b359..bdef0ba631b72 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -112,7 +112,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -1835,9 +1834,10 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (isSyntheticSourceConfiguredInMapping(mapping)) { - request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); - } + } else if (isSyntheticSourceConfiguredInMapping(mapping) + && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { + request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); + } final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1898,8 +1898,30 @@ protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { if (sourceMapper == null) { return false; } - Object mode = sourceMapper.get("mode"); - return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); + return sourceMapper.get("mode") != null; + } + + @SuppressWarnings("unchecked") + protected static boolean isSyntheticSourceConfiguredInTemplate(String template) { + if (template == null) { + return false; + } + var values = XContentHelper.convertToMap(JsonXContent.jsonXContent, template, false); + for (Object value : values.values()) { + Map mappings = (Map) ((Map) value).get("mappings"); + if (mappings == null) { + continue; + } + Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); + if (sourceMapper == null) { + continue; + } + Object mode = sourceMapper.get("mode"); + if (mode != null) { + return true; + } + } + return false; } protected static Map getIndexSettings(String index) throws IOException { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index d13f3cda2a82c..f9b2cc5afe3a5 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -96,7 +96,8 @@ private DeprecationChecks() {} IndexDeprecationChecks::checkIndexDataPath, IndexDeprecationChecks::storeTypeSettingCheck, IndexDeprecationChecks::frozenIndexSettingCheck, - IndexDeprecationChecks::deprecatedCamelCasePattern + IndexDeprecationChecks::deprecatedCamelCasePattern, + IndexDeprecationChecks::checkSourceModeInMapping ); static List> DATA_STREAM_CHECKS = List.of( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 8144d960df2e8..aaf58a44a6565 100644 --- 
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -16,6 +16,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.frozen.FrozenEngine; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.util.ArrayList; @@ -201,6 +202,31 @@ static List findInPropertiesRecursively( return issues; } + static DeprecationIssue checkSourceModeInMapping(IndexMetadata indexMetadata, ClusterState clusterState) { + if (SourceFieldMapper.onOrAfterDeprecateModeVersion(indexMetadata.getCreationVersion())) { + boolean[] useSourceMode = { false }; + fieldLevelMappingIssue(indexMetadata, ((mappingMetadata, sourceAsMap) -> { + Object source = sourceAsMap.get("_source"); + if (source instanceof Map sourceMap) { + if (sourceMap.containsKey("mode")) { + useSourceMode[0] = true; + } + } + })); + if (useSourceMode[0]) { + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING, + "https://github.com/elastic/elasticsearch/pull/117172", + SourceFieldMapper.DEPRECATION_WARNING, + false, + null + ); + } + } + return null; + } + static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { List fields = new ArrayList<>(); fieldLevelMappingIssue( diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 2c36b42dee277..06b890603e489 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -43,6 +43,7 @@ public abstract class AsyncOperator implements Operator { private final int maxOutstandingRequests; private final LongAdder totalTimeInNanos = new LongAdder(); + private boolean finished = false; private volatile boolean closed = false; diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index f529b9fa1db96..99acbec04551e 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -7,9 +7,11 @@ package org.elasticsearch.xpack.logsdb; +import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.junit.Before; @@ -112,8 +114,11 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { }"""; assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); - assertOK(createDataStream(client, "logs-custom-dev")); - + Request request = new Request("PUT", "_data_stream/logs-custom-dev"); + if 
(SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { + request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); + } + assertOK(client.performRequest(request)); var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); assertThat(sourceMode, equalTo("stored")); @@ -182,7 +187,11 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { }"""; assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); - assertOK(createDataStream(client, "logs-custom-dev")); + Request request = new Request("PUT", "_data_stream/logs-custom-dev"); + if (SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { + request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); + } + assertOK(client.performRequest(request)); var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index cc7f5bdb33871..0990592cef5e3 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -35,6 +36,11 @@ protected static Response putComponentTemplate(final RestClient client, final St throws IOException { final Request request = new Request("PUT", "/_component_template/" + componentTemplate); request.setJsonEntity(contends); + if (isSyntheticSourceConfiguredInTemplate(contends) && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { + request.setOptions( + expectVersionSpecificWarnings((VersionSensitiveWarningsHandler v) -> v.current(SourceFieldMapper.DEPRECATION_WARNING)) + ); + } return client.performRequest(request); } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 1f5d26eaedf34..d6cdb9f761b31 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -81,10 +81,12 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); assertThat(newMapperServiceCounter.get(), equalTo(1)); + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { String mapping; - if (randomBoolean()) { + boolean withSourceMode = randomBoolean(); + if (withSourceMode) { mapping = """ { "_doc": { @@ -115,6 +117,9 @@ public void 
testNewIndexHasSyntheticSourceUsage() throws IOException { boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); assertThat(newMapperServiceCounter.get(), equalTo(2)); + if (withSourceMode) { + assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); + } } } From 1402e6887ca45a74d2818725e254bd3062db05f8 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 25 Nov 2024 19:29:22 +0100 Subject: [PATCH 233/386] Add support for aggregations, GROK and DISSECT for semantic_text (#117337) * Add support for aggregations for semantic_text * Add capability to csv tests for grok and dissect * Sort values to avoid flaky tests --- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 3 +- .../main/resources/mapping-semantic_text.json | 4 + .../src/main/resources/semantic_text.csv | 8 +- .../src/main/resources/semantic_text.csv-spec | 98 +++++++++++++++++-- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../function/aggregate/CountDistinct.java | 3 +- .../expression/function/aggregate/Max.java | 1 + .../expression/function/aggregate/Min.java | 3 +- .../expression/function/aggregate/Values.java | 1 + .../xpack/esql/planner/AggregateMapper.java | 9 +- .../aggregate/CountDistinctTests.java | 3 +- .../function/aggregate/CountTests.java | 3 +- .../function/aggregate/MaxTests.java | 3 +- .../function/aggregate/MinTests.java | 3 +- .../function/aggregate/TopTests.java | 3 +- .../function/aggregate/ValuesTests.java | 3 +- 16 files changed, 131 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 265d9f7bd8cd5..2484a428c4b03 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -172,7 +172,8 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { - if (testCase.requiredCapabilities.contains("semantic_text_type")) { + if (testCase.requiredCapabilities.contains("semantic_text_type") + || testCase.requiredCapabilities.contains("semantic_text_aggregations")) { assumeTrue("Inference test service needs to be supported for semantic_text", supportsInferenceTestService()); } checkCapabilities(adminClient(), testFeatureService, testName, testCase); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json index c587b69828170..db15133f036bb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-semantic_text.json @@ -72,6 +72,10 @@ "st_base64": { "type": "semantic_text", "inference_id": "test_sparse_inference" + }, + "st_logs": { + "type": "semantic_text", + "inference_id": "test_sparse_inference" } } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv index 6cae82cfefa0a..bd5fe7fad3a4e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv @@ -1,4 +1,4 @@ 
-_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long,st_base64:semantic_text -1,live long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001,ZWxhc3RpYw== -2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002,aGVsbG8= -3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003, +_id:keyword,semantic_text_field:semantic_text,st_bool:semantic_text,st_cartesian_point:semantic_text,st_cartesian_shape:semantic_text,st_datetime:semantic_text,st_double:semantic_text,st_geopoint:semantic_text,st_geoshape:semantic_text,st_integer:semantic_text,st_ip:semantic_text,st_long:semantic_text,st_unsigned_long:semantic_text,st_version:semantic_text,st_multi_value:semantic_text,st_unicode:semantic_text,host:keyword,description:text,value:long,st_base64:semantic_text,st_logs:semantic_text +1,live long and prosper,false,"POINT(4297.11 -1475.53)",,1953-09-02T00:00:00.000Z,5.20128E11,"POINT(42.97109630194 14.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",23,1.1.1.1,2147483648,2147483648,1.2.3,["Hello there!", "This is a random value", "for testing purposes"],你吃饭了吗,"host1","some description1",1001,ZWxhc3RpYw==,"2024-12-23T12:15:00.000Z 1.2.3.4 example@example.com 4553" +2,all we have to decide is what to do with the time that is given to us,true,"POINT(7580.93 2272.77)",,2023-09-24T15:57:00.000Z,4541.11,"POINT(37.97109630194 21.7552534413725)","POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))",122,1.1.2.1,123,2147483648.2,9.0.0,["nice to meet you", "bye bye!"],["谢谢", "对不起我的中文不好"],"host2","some description2",1002,aGVsbG8=,"2024-01-23T12:15:00.000Z 1.2.3.4 foo@example.com 42" +3,be excellent to each other,,,,,,,,,,,,,,,"host3","some description3",1003,,"2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec index de2a79df06a50..43dc6e4d4acd2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/semantic_text.csv-spec @@ -88,19 +88,75 @@ _id:keyword | my_field:semantic_text 3 | be excellent to each other ; -simpleStats -required_capability: semantic_text_type +statsWithCount +required_capability: semantic_text_aggregations + +FROM semantic_text METADATA _id +| STATS result = COUNT(st_version) +; + +result:long +2 +; + +statsWithCountDistinct +required_capability: semantic_text_aggregations + +FROM semantic_text METADATA _id +| STATS result = COUNT_DISTINCT(st_version) 
+; + +result:long +2 +; + +statsWithValues +required_capability: semantic_text_aggregations + +FROM semantic_text METADATA _id +| STATS result = VALUES(st_version) +| EVAL result = MV_SORT(result) +; + +result:keyword +["1.2.3", "9.0.0"] +; + +statsWithMin +required_capability: semantic_text_aggregations + +FROM semantic_text METADATA _id +| STATS result = min(st_version) +; + +result:keyword +1.2.3 +; + +statsWithMax +required_capability: semantic_text_aggregations FROM semantic_text METADATA _id -| STATS COUNT(*) +| STATS result = max(st_version) ; -COUNT(*):long -3 +result:keyword +9.0.0 +; + +statsWithTop +required_capability: semantic_text_aggregations + +FROM semantic_text METADATA _id +| STATS result = top(st_version, 2, "asc") +; + +result:keyword +["1.2.3", "9.0.0"] ; statsWithGrouping -required_capability: semantic_text_type +required_capability: semantic_text_aggregations FROM semantic_text METADATA _id | STATS COUNT(*) BY st_version @@ -132,6 +188,36 @@ COUNT(*):long | my_field:semantic_text 1 | bye bye! ; +grok +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| GROK st_logs """%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num}""" +| KEEP st_logs, date, ip, email, num +| SORT st_logs +; + +st_logs:semantic_text | date:keyword | ip:keyword | email:keyword | num:keyword +2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42 | 2023-01-23T12:15:00.000Z | 127.0.0.1 | some.email@foo.com | 42 +2024-01-23T12:15:00.000Z 1.2.3.4 foo@example.com 42 | 2024-01-23T12:15:00.000Z | 1.2.3.4 | foo@example.com | 42 +2024-12-23T12:15:00.000Z 1.2.3.4 example@example.com 4553 | 2024-12-23T12:15:00.000Z | 1.2.3.4 | example@example.com | 4553 +; + +dissect +required_capability: semantic_text_type + +FROM semantic_text METADATA _id +| DISSECT st_logs """%{date} %{ip} %{email} %{num}""" +| KEEP st_logs, date, ip, email, num +| SORT st_logs +; + +st_logs:semantic_text | date:keyword | ip:keyword | email:keyword | num:keyword +2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42 | 2023-01-23T12:15:00.000Z | 127.0.0.1 | some.email@foo.com | 42 +2024-01-23T12:15:00.000Z 1.2.3.4 foo@example.com 42 | 2024-01-23T12:15:00.000Z | 1.2.3.4 | foo@example.com | 42 +2024-12-23T12:15:00.000Z 1.2.3.4 example@example.com 4553 | 2024-12-23T12:15:00.000Z | 1.2.3.4 | example@example.com | 4553 +; + simpleWithLongValue required_capability: semantic_text_type diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d9ce7fca312b3..08fa7f0a9b213 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -526,7 +526,12 @@ public enum Cap { /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 */ - FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER; + FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER, + + /** + * support for aggregations on semantic_text + */ + SEMANTIC_TEXT_AGGREGATIONS(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 5ae162f1fbb12..2e45b1c1fe082 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -66,7 +66,8 @@ public class CountDistinct extends AggregateFunction implements OptionalArgument Map.entry(DataType.KEYWORD, CountDistinctBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.IP, CountDistinctBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.VERSION, CountDistinctBytesRefAggregatorFunctionSupplier::new), - Map.entry(DataType.TEXT, CountDistinctBytesRefAggregatorFunctionSupplier::new) + Map.entry(DataType.TEXT, CountDistinctBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.SEMANTIC_TEXT, CountDistinctBytesRefAggregatorFunctionSupplier::new) ); private static final int DEFAULT_PRECISION = 3000; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 2165c3c7ad1a0..eb0c8abd1080b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -51,6 +51,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp Map.entry(DataType.IP, MaxIpAggregatorFunctionSupplier::new), Map.entry(DataType.KEYWORD, MaxBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.TEXT, MaxBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.SEMANTIC_TEXT, MaxBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.VERSION, MaxBytesRefAggregatorFunctionSupplier::new) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 7d67868dd4134..472f0b1ff5cd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -51,7 +51,8 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp Map.entry(DataType.IP, MinIpAggregatorFunctionSupplier::new), Map.entry(DataType.VERSION, MinBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.KEYWORD, MinBytesRefAggregatorFunctionSupplier::new), - Map.entry(DataType.TEXT, MinBytesRefAggregatorFunctionSupplier::new) + Map.entry(DataType.TEXT, MinBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.SEMANTIC_TEXT, MinBytesRefAggregatorFunctionSupplier::new) ); @FunctionInfo( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index e7df990b20422..5260b3e8fa279 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -46,6 +46,7 @@ public class Values extends AggregateFunction implements ToAggregator { Map.entry(DataType.DOUBLE, ValuesDoubleAggregatorFunctionSupplier::new), Map.entry(DataType.KEYWORD, 
ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.TEXT, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.SEMANTIC_TEXT, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.IP, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.VERSION, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.BOOLEAN, ValuesBooleanAggregatorFunctionSupplier::new) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 605e0d7c3109c..18bbfdf485a81 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -302,12 +302,13 @@ private static String dataTypeToString(DataType type, Class aggClass) { case DataType.INTEGER, DataType.COUNTER_INTEGER -> "Int"; case DataType.LONG, DataType.DATETIME, DataType.COUNTER_LONG, DataType.DATE_NANOS -> "Long"; case DataType.DOUBLE, DataType.COUNTER_DOUBLE -> "Double"; - case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT -> "BytesRef"; + case DataType.KEYWORD, DataType.IP, DataType.VERSION, DataType.TEXT, DataType.SEMANTIC_TEXT -> "BytesRef"; case GEO_POINT -> "GeoPoint"; case CARTESIAN_POINT -> "CartesianPoint"; - case SEMANTIC_TEXT, UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, - TIME_DURATION, CARTESIAN_SHAPE, GEO_SHAPE, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> - throw new EsqlIllegalArgumentException("illegal agg type: " + type.typeName()); + case UNSUPPORTED, NULL, UNSIGNED_LONG, SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, SOURCE, DATE_PERIOD, TIME_DURATION, + CARTESIAN_SHAPE, GEO_SHAPE, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG -> throw new EsqlIllegalArgumentException( + "illegal agg type: " + type.typeName() + ); }; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java index fff2d824fc710..e0b8c1356d087 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -57,7 +57,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).forEach(fieldCaseSupplier -> { // With precision for (var precisionCaseSupplier : precisionSuppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java index 979048534edbf..131072acff870 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountTests.java @@ -47,7 +47,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.geoPointCases(1, 1000, true), MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, true), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(CountTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); // No rows diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index 7d4b46f2a902a..ae5b3691b0a7d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -48,7 +48,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(MaxTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); suppliers.addAll( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index 58ef8d86017a8..ad2953f057635 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -48,7 +48,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(MinTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); suppliers.addAll( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java index f7bf338caa099..f236e4d8faf98 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java @@ -48,7 +48,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.TEXT), + 
MultiRowTestCaseSupplier.stringCases(1, 1000, DataType.SEMANTIC_TEXT) ) .flatMap(List::stream) .map(fieldCaseSupplier -> TopTests.makeSupplier(fieldCaseSupplier, limitCaseSupplier, order)) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java index 29faceee7497e..5f35f8cada397 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -51,7 +51,8 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.versionCases(1, 1000), // Lower values for strings, as they take more space and may trigger the circuit breaker MultiRowTestCaseSupplier.stringCases(1, 20, DataType.KEYWORD), - MultiRowTestCaseSupplier.stringCases(1, 20, DataType.TEXT) + MultiRowTestCaseSupplier.stringCases(1, 20, DataType.TEXT), + MultiRowTestCaseSupplier.stringCases(1, 20, DataType.SEMANTIC_TEXT) ).flatMap(List::stream).map(ValuesTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); return parameterSuppliersFromTypedDataWithDefaultChecks( From 219372efaaf46a3b496df2142d3091d3434e67ec Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 25 Nov 2024 13:44:59 -0500 Subject: [PATCH 234/386] [CI] Ignore error about missing UBI artifact (#117506) --- .buildkite/scripts/dra-workflow.sh | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index f2dc40ca1927f..bbfa81f51b286 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -75,6 +75,7 @@ find "$WORKSPACE" -type d -path "*/build/distributions" -exec chmod a+w {} \; echo --- Running release-manager +set +e # Artifacts should be generated docker run --rm \ --name release-manager \ @@ -91,4 +92,16 @@ docker run --rm \ --version "$ES_VERSION" \ --artifact-set main \ --dependency "beats:https://artifacts-${WORKFLOW}.elastic.co/beats/${BEATS_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" \ - --dependency "ml-cpp:https://artifacts-${WORKFLOW}.elastic.co/ml-cpp/${ML_CPP_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" + --dependency "ml-cpp:https://artifacts-${WORKFLOW}.elastic.co/ml-cpp/${ML_CPP_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" \ +2>&1 | tee release-manager.log +EXIT_CODE=$? 
+set -e
+
+# This failure is just generating a ton of noise right now, so let's just ignore it
+# This should be removed once this issue has been fixed
+if grep "elasticsearch-ubi-9.0.0-SNAPSHOT-docker-image.tar.gz" release-manager.log; then
+  echo "Ignoring error about missing ubi artifact"
+  exit 0
+fi
+
+exit "$EXIT_CODE"

From 5a6464c552dc8d58dc40119292a99ff4d69cf385 Mon Sep 17 00:00:00 2001
From: Mikhail Berezovskiy
Date: Mon, 25 Nov 2024 14:14:37 -0500
Subject: [PATCH 235/386] Add HTTP request info to leaking buffers (#116130)

---
 .../netty4/Netty4HttpServerTransport.java  |  4 ++
 .../netty4/Netty4LeakDetectionHandler.java | 40 +++++++++++++++++++
 2 files changed, 44 insertions(+)
 create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4LeakDetectionHandler.java

diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
index b971a52b7afb6..36c860f1fb90b 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
@@ -33,6 +33,7 @@
 import io.netty.handler.timeout.ReadTimeoutException;
 import io.netty.handler.timeout.ReadTimeoutHandler;
 import io.netty.util.AttributeKey;
+import io.netty.util.ResourceLeakDetector;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -410,6 +411,9 @@ protected Result beginEncode(HttpResponse httpResponse, String acceptEncoding) t
                     }
                 });
             }
+            if (ResourceLeakDetector.isEnabled()) {
+                ch.pipeline().addLast(new Netty4LeakDetectionHandler());
+            }
             ch.pipeline()
                 .addLast(
                     "pipelining",
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4LeakDetectionHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4LeakDetectionHandler.java
new file mode 100644
index 0000000000000..8a0274872e493
--- /dev/null
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4LeakDetectionHandler.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.http.netty4;
+
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelInboundHandlerAdapter;
+import io.netty.handler.codec.http.HttpContent;
+import io.netty.handler.codec.http.HttpRequest;
+
+import org.elasticsearch.tasks.Task;
+
+/**
+ * Inbound channel handler that enriches leaked-buffer information with details from the HTTP request.
+ * It helps to detect which handler is leaking buffers, especially in integration tests that run with
+ * the paranoid leak detector, which samples all buffers for leaks. Supplying an informative opaque id
+ * in an integration test helps to narrow down the problem (for example, to the test name).
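+ * <p>
+ * Illustrative example (editor's note, not part of this change): with the leak detector
+ * enabled (e.g. {@code -Dio.netty.leakDetection.level=paranoid}) and a request sent with
+ * an {@code X-Opaque-Id: my-test} header, a sampled leaked buffer carries a hint of the
+ * form {@code method: GET; uri: /_search; x-opaque-id: my-test}, pointing back at the
+ * request (and hence the test) that produced it.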
+ */ +public class Netty4LeakDetectionHandler extends ChannelInboundHandlerAdapter { + + private String info; + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) { + if (msg instanceof HttpRequest request) { + var opaqueId = request.headers().get(Task.X_OPAQUE_ID_HTTP_HEADER); + info = "method: " + request.method() + "; uri: " + request.uri() + "; x-opaque-id: " + opaqueId; + } + if (msg instanceof HttpContent content) { + content.touch(info); + } + ctx.fireChannelRead(msg); + } +} From 930a99cc3874e2e195f4a79a4c0f5953fcf27b45 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 25 Nov 2024 20:19:24 +0100 Subject: [PATCH 236/386] Fix and unmute synonyms tests using timeout (#117486) --- muted-tests.yml | 3 --- .../rest-api-spec/test/synonyms/10_synonyms_put.yml | 10 ++++++++-- .../test/synonyms/110_synonyms_invalid.yml | 5 ++++- .../rest-api-spec/test/synonyms/20_synonyms_get.yml | 5 ++++- .../rest-api-spec/test/synonyms/30_synonyms_delete.yml | 6 +++++- .../test/synonyms/40_synonyms_sets_get.yml | 5 ++++- .../test/synonyms/50_synonym_rule_put.yml | 6 +++++- .../test/synonyms/60_synonym_rule_get.yml | 6 ++++-- .../test/synonyms/70_synonym_rule_delete.yml | 5 ++++- .../test/synonyms/80_synonyms_from_index.yml | 5 ++++- .../test/synonyms/90_synonyms_reloading_for_synset.yml | 5 ++++- 11 files changed, 46 insertions(+), 15 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index da8a093ebe674..f4c5a418666b9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -185,9 +185,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=snapshot/20_operator_privileges_disabled/Operator only settings can be set and restored by non-operator user when operator privileges is disabled} issue: https://github.com/elastic/elasticsearch/issues/116775 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} - issue: https://github.com/elastic/elasticsearch/issues/116777 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml index 675b98133ce11..93f1fafa7ab85 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml @@ -17,7 +17,10 @@ setup: - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 - do: synonyms.get_synonym: @@ -64,7 +67,10 @@ setup: - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 - do: synonyms.get_synonym: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml index 4e77e10495109..7f545b466e65f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml @@ -14,7 +14,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml index 5e6d4ec2341ad..9e6af0f471e6e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml @@ -17,7 +17,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 --- "Get synonyms set": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml index 23c907f6a1137..62e8fe333ce99 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml @@ -15,7 +15,11 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 + --- "Delete synonyms set": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index 7c145dafd81cd..3815ea2c96c97 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -13,7 +13,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 - do: synonyms.put_synonym: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml index d8611000fe465..02757f711f690 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml @@ -17,7 +17,11 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 + --- "Update a synonyms rule": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml index 0c962b51e08cb..9f1aa1d254169 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml @@ -17,8 +17,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true - + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 --- "Get a synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml index 41ab293158a35..d2c706decf4fd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml @@ -17,7 +17,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 --- "Delete synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml index 3aba0f0b4b78b..965cae551fab2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml @@ -16,7 +16,10 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 # Create an index with synonym_filter that uses that synonyms set - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index ac01f2dc0178a..d6c98673253fb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -28,7 +28,10 @@ # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - wait_for_no_initializing_shards: true + index: .synonyms-2 + timeout: 2s + wait_for_status: green + ignore: 408 # Create my_index1 with synonym_filter that uses synonyms_set1 - do: From 565218e43e0874808d5929ccfecf7359d0423b37 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Nov 2024 08:38:32 +1100 Subject: [PATCH 237/386] Mute org.elasticsearch.xpack.esql.qa.single_node.FieldExtractorIT testConstantKeywordField #117524 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f4c5a418666b9..2a800c2757f2b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -232,6 +232,9 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test reset running transform} issue: https://github.com/elastic/elasticsearch/issues/117473 +- class: org.elasticsearch.xpack.esql.qa.single_node.FieldExtractorIT + method: testConstantKeywordField + issue: https://github.com/elastic/elasticsearch/issues/117524 # Examples: # From 6260746cb958785c6d6c1aa023d4e00ee8b8d56f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Nov 2024 08:39:00 +1100 Subject: [PATCH 238/386] Mute org.elasticsearch.xpack.esql.qa.multi_node.FieldExtractorIT testConstantKeywordField #117524 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2a800c2757f2b..986bea5b248f1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,6 +235,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.single_node.FieldExtractorIT method: testConstantKeywordField issue: https://github.com/elastic/elasticsearch/issues/117524 +- class: org.elasticsearch.xpack.esql.qa.multi_node.FieldExtractorIT + method: testConstantKeywordField + issue: https://github.com/elastic/elasticsearch/issues/117524 # Examples: # From e5b9b7e9babe42043184b4a2820645a46b9a000d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Nov 2024 08:46:33 +1100 Subject: [PATCH 239/386] Mute org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT #117525 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 986bea5b248f1..37f36e9a19340 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -238,6 +238,8 @@ tests: - class: org.elasticsearch.xpack.esql.qa.multi_node.FieldExtractorIT method: testConstantKeywordField issue: https://github.com/elastic/elasticsearch/issues/117524 +- class: org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/117525 # Examples: # From 0f5eb0c2762938eac558eb1c09e9189ecd2f5113 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Nov 2024 15:15:14 -0800 Subject: [PATCH 240/386] Adjust deprecate index versions (#117523) Adjust the deprecation index check to support the backport version in 8.x. 
Relates #117183 --- .../src/main/java/org/elasticsearch/index/IndexVersions.java | 1 + .../org/elasticsearch/index/mapper/SourceFieldMapper.java | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7a5f469a57fa1..6344aa2a72ca9 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -131,6 +131,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion ADD_ROLE_MAPPING_CLEANUP_MIGRATION = def(8_518_00_0, Version.LUCENE_9_12_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT_BACKPORT = def(8_519_00_0, Version.LUCENE_9_12_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID_BACKPORT = def(8_520_00_0, Version.LUCENE_9_12_0); + public static final IndexVersion V8_DEPRECATE_SOURCE_MODE_MAPPER = def(8_521_00_0, Version.LUCENE_9_12_0); public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 9d0dc9635537b..e7c7ec3535b91 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -484,8 +484,7 @@ public boolean isStored() { } public static boolean onOrAfterDeprecateModeVersion(IndexVersion version) { - return version.onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER); - // Adjust versions after backporting. - // || version.between(IndexVersions.BACKPORT_DEPRECATE_SOURCE_MODE_MAPPER, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); + return version.onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) + || version.between(IndexVersions.V8_DEPRECATE_SOURCE_MODE_MAPPER, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); } } From fadc752b4a854bb655896b232fd02a2e9d822518 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Nov 2024 18:52:17 -0800 Subject: [PATCH 241/386] Allow exchange source continue on failure (#117410) Currently, when an exchange request fails, we stop fetching pages and abort the ExchangeSource. However, to support partial_results, we need to continue fetching pages from other remote sinks despite failures. This change introduces a failFast flag in ExchangeSource, which enables the process to continue in case of failures. By default, this flag is set to true but switches to false when allow_partial_results is enabled. 
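(Editor's sketch, not part of this patch: how a caller might wire a remote sink for
partial results with the API below. `esqlExecutor`, `remoteSink`, `failures`,
`onQueryDone` and `onQueryFailed` are hypothetical stand-ins; the constructor and
`addRemoteSink` signatures are the ones added by this change.)

    // The handler-level completion listener only sees failures collected from sinks
    // added with failFast=true; failFast=false sinks report to their own listener.
    ExchangeSourceHandler sources = new ExchangeSourceHandler(
        10,           // maxBufferSize
        esqlExecutor, // executor used to fetch pages
        ActionListener.wrap(done -> onQueryDone(), e -> onQueryFailed(e))
    );
    // A failure in this sink no longer aborts the exchange: the per-sink listener is
    // notified, the caller records the error, and pages already fetched from the
    // other remote sinks can still back a partial result.
    sources.addRemoteSink(
        remoteSink,
        false,        // failFast: keep fetching from the remaining sinks
        1,            // instances
        ActionListener.wrap(done -> {}, failures::add)
    );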
--- .../operator/exchange/ExchangeService.java | 4 +- .../exchange/ExchangeSinkHandler.java | 2 +- .../exchange/ExchangeSourceHandler.java | 94 ++++++++---- .../operator/ForkingOperatorTestCase.java | 15 +- .../exchange/ExchangeServiceTests.java | 137 +++++++++++++++--- .../xpack/esql/plugin/ComputeService.java | 79 +++++----- .../elasticsearch/xpack/esql/CsvTests.java | 11 +- 7 files changed, 253 insertions(+), 89 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index e6bae7ba385e6..d633270b5c595 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -47,7 +47,7 @@ /** * {@link ExchangeService} is responsible for exchanging pages between exchange sinks and sources on the same or different nodes. * It holds a map of {@link ExchangeSinkHandler} instances for each node in the cluster to serve {@link ExchangeRequest}s - * To connect exchange sources to exchange sinks, use the {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, int)} method. + * To connect exchange sources to exchange sinks, use {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, int, ActionListener)}. */ public final class ExchangeService extends AbstractLifecycleComponent { // TODO: Make this a child action of the data node transport to ensure that exchanges @@ -311,7 +311,7 @@ static final class TransportRemoteSink implements RemoteSink { @Override public void fetchPageAsync(boolean allSourcesFinished, ActionListener listener) { - final long reservedBytes = estimatedPageSizeInBytes.get(); + final long reservedBytes = allSourcesFinished ? 0 : estimatedPageSizeInBytes.get(); if (reservedBytes > 0) { // This doesn't fully protect ESQL from OOM, but reduces the likelihood. blockFactory.breaker().addEstimateBytesAndMaybeBreak(reservedBytes, "fetch page"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index 757a3262433c8..614c3fe0ecc5c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -93,7 +93,7 @@ public IsBlockedResult waitForWriting() { * @param sourceFinished if true, then this handler can finish as sources have enough pages. 
* @param listener the listener that will be notified when pages are ready or this handler is finished * @see RemoteSink - * @see ExchangeSourceHandler#addRemoteSink(RemoteSink, int) + * @see ExchangeSourceHandler#addRemoteSink(RemoteSink, boolean, int, ActionListener) */ public void fetchPageAsync(boolean sourceFinished, ActionListener listener) { if (sourceFinished) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index 4baaf9ad89bd6..61b3386ce0274 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -24,10 +24,10 @@ /** * An {@link ExchangeSourceHandler} asynchronously fetches pages and status from multiple {@link RemoteSink}s * and feeds them to its {@link ExchangeSource}, which are created using the {@link #createExchangeSource()}) method. - * {@link RemoteSink}s are added using the {@link #addRemoteSink(RemoteSink, int)}) method. + * {@link RemoteSink}s are added using the {@link #addRemoteSink(RemoteSink, boolean, int, ActionListener)}) method. * * @see #createExchangeSource() - * @see #addRemoteSink(RemoteSink, int) + * @see #addRemoteSink(RemoteSink, boolean, int, ActionListener) */ public final class ExchangeSourceHandler { private final ExchangeBuffer buffer; @@ -35,13 +35,43 @@ public final class ExchangeSourceHandler { private final PendingInstances outstandingSinks; private final PendingInstances outstandingSources; + // Collect failures that occur while fetching pages from the remote sink with `failFast=true`. + // The exchange source will stop fetching and abort as soon as any failure is added to this failure collector. + // The final failure collected will be notified to callers via the {@code completionListener}. private final FailureCollector failure = new FailureCollector(); - public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor) { + /** + * Creates a new ExchangeSourceHandler. + * + * @param maxBufferSize the maximum size of the exchange buffer. A larger buffer reduces ``pauses`` but uses more memory, + * which could otherwise be allocated for other purposes. + * @param fetchExecutor the executor used to fetch pages. 
+ * @param completionListener a listener that will be notified when the exchange source handler fails or completes + */ + public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor, ActionListener completionListener) { this.buffer = new ExchangeBuffer(maxBufferSize); this.fetchExecutor = fetchExecutor; this.outstandingSinks = new PendingInstances(() -> buffer.finish(false)); this.outstandingSources = new PendingInstances(() -> buffer.finish(true)); + buffer.addCompletionListener(ActionListener.running(() -> { + final ActionListener listener = ActionListener.assertAtLeastOnce(completionListener).delegateFailure((l, unused) -> { + final Exception e = failure.getFailure(); + if (e != null) { + l.onFailure(e); + } else { + l.onResponse(null); + } + }); + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. + pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); } private class ExchangeSourceImpl implements ExchangeSource { @@ -89,20 +119,6 @@ public int bufferSize() { } } - public void addCompletionListener(ActionListener listener) { - buffer.addCompletionListener(ActionListener.running(() -> { - try (RefCountingListener refs = new RefCountingListener(listener)) { - for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { - // Create an outstanding instance and then finish to complete the completionListener - // if we haven't registered any instances of exchange sinks or exchange sources before. - pending.trackNewInstance(); - pending.completion.addListener(refs.acquire()); - pending.finishInstance(); - } - } - })); - } - /** * Create a new {@link ExchangeSource} for exchanging data * @@ -159,10 +175,14 @@ void exited() { private final class RemoteSinkFetcher { private volatile boolean finished = false; private final RemoteSink remoteSink; + private final boolean failFast; + private final ActionListener completionListener; - RemoteSinkFetcher(RemoteSink remoteSink) { + RemoteSinkFetcher(RemoteSink remoteSink, boolean failFast, ActionListener completionListener) { outstandingSinks.trackNewInstance(); this.remoteSink = remoteSink; + this.failFast = failFast; + this.completionListener = completionListener; } void fetchPage() { @@ -198,15 +218,22 @@ void fetchPage() { } void onSinkFailed(Exception e) { - failure.unwrapAndCollect(e); + if (failFast) { + failure.unwrapAndCollect(e); + } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading - onSinkComplete(); + if (finished == false) { + finished = true; + outstandingSinks.finishInstance(); + completionListener.onFailure(e); + } } void onSinkComplete() { if (finished == false) { finished = true; outstandingSinks.finishInstance(); + completionListener.onResponse(null); } } } @@ -215,23 +242,36 @@ void onSinkComplete() { * Add a remote sink as a new data source of this handler. The handler will start fetching data from this remote sink intermediately. * * @param remoteSink the remote sink - * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. More clients reduce latency, - * but add overhead. 
+ * @param failFast determines how failures in this remote sink are handled: + * - If {@code false}, failures from this remote sink will not cause the exchange source to abort. + * Callers must handle these failures notified via {@code listener}. + * - If {@code true}, failures from this remote sink will cause the exchange source to abort. + * Callers can safely ignore failures notified via this listener, as they are collected and + * reported by the exchange source. + * @param instances the number of concurrent ``clients`` that this handler should use to fetch pages. + * More clients reduce latency, but add overhead. + * @param listener a listener that will be notified when the sink fails or completes * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ - public void addRemoteSink(RemoteSink remoteSink, int instances) { + public void addRemoteSink(RemoteSink remoteSink, boolean failFast, int instances, ActionListener listener) { + final ActionListener sinkListener = ActionListener.assertAtLeastOnce(ActionListener.notifyOnce(listener)); fetchExecutor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { - failure.unwrapAndCollect(e); + if (failFast) { + failure.unwrapAndCollect(e); + } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading + sinkListener.onFailure(e); } @Override protected void doRun() { - for (int i = 0; i < instances; i++) { - var fetcher = new RemoteSinkFetcher(remoteSink); - fetcher.fetchPage(); + try (RefCountingListener refs = new RefCountingListener(sinkListener)) { + for (int i = 0; i < instances; i++) { + var fetcher = new RemoteSinkFetcher(remoteSink, failFast, refs.acquire()); + fetcher.fetchPage(); + } } } }); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index c0396fdc469aa..542bf5bc384a5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -209,8 +209,19 @@ List createDriversForInput(List input, List results, boolean randomIntBetween(2, 10), threadPool.relativeTimeInMillisSupplier() ); - ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(randomIntBetween(1, 4), threadPool.executor(ESQL_TEST_EXECUTOR)); - sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); + ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler( + randomIntBetween(1, 4), + threadPool.executor(ESQL_TEST_EXECUTOR), + ActionListener.noop() + ); + sourceExchanger.addRemoteSink( + sinkExchanger::fetchPageAsync, + randomBoolean(), + 1, + ActionListener.noop().delegateResponse((l, e) -> { + throw new AssertionError("unexpected failure", e); + }) + ); Iterator intermediateOperatorItr; int itrSize = (splitInput.size() * 3) + 3; // 3 inter ops per initial source drivers, and 3 per final diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 0b1ecce8c375b..8949f61b7420d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -56,6 +57,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -63,6 +65,7 @@ import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; public class ExchangeServiceTests extends ESTestCase { @@ -94,11 +97,10 @@ public void testBasic() throws Exception { ExchangeSinkHandler sinkExchanger = new ExchangeSinkHandler(blockFactory, 2, threadPool.relativeTimeInMillisSupplier()); ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); - ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); PlainActionFuture sourceCompletion = new PlainActionFuture<>(); - sourceExchanger.addCompletionListener(sourceCompletion); + ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR), sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); - sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); + sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, randomBoolean(), 1, ActionListener.noop()); SubscribableListener waitForReading = source.waitForReading().listener(); assertFalse(waitForReading.isDone()); assertNull(source.pollPage()); @@ -263,7 +265,7 @@ public void close() { } } - void runConcurrentTest( + Set runConcurrentTest( int maxInputSeqNo, int maxOutputSeqNo, Supplier exchangeSource, @@ -318,16 +320,17 @@ protected void start(Driver driver, ActionListener listener) { } }.runToCompletion(drivers, future); future.actionGet(TimeValue.timeValueMinutes(1)); - var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); - assertThat(seqNoCollector.receivedSeqNos, hasSize(expectedSeqNos.size())); - assertThat(seqNoCollector.receivedSeqNos, equalTo(expectedSeqNos)); + return seqNoCollector.receivedSeqNos; } public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); - sourceExchanger.addCompletionListener(sourceCompletionFuture); + var sourceExchanger = new ExchangeSourceHandler( + randomExchangeBuffer(), + threadPool.executor(ESQL_TEST_EXECUTOR), + sourceCompletionFuture + ); List sinkHandlers = new ArrayList<>(); Supplier exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -335,17 +338,89 @@ public void testConcurrentWithHandlers() { sinkHandler = randomFrom(sinkHandlers); } else { sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), 
threadPool.relativeTimeInMillisSupplier()); - sourceExchanger.addRemoteSink(sinkHandler::fetchPageAsync, randomIntBetween(1, 3)); + sourceExchanger.addRemoteSink(sinkHandler::fetchPageAsync, randomBoolean(), randomIntBetween(1, 3), ActionListener.noop()); sinkHandlers.add(sinkHandler); } return sinkHandler.createExchangeSink(); }; final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); - runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + Set actualSeqNos = runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet()); + assertThat(actualSeqNos, hasSize(expectedSeqNos.size())); + assertThat(actualSeqNos, equalTo(expectedSeqNos)); sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } + public void testExchangeSourceContinueOnFailure() { + BlockFactory blockFactory = blockFactory(); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + var exchangeSourceHandler = new ExchangeSourceHandler( + randomExchangeBuffer(), + threadPool.executor(ESQL_TEST_EXECUTOR), + sourceCompletionFuture + ); + final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); + final int maxOutputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); + Set expectedSeqNos = ConcurrentCollections.newConcurrentSet(); + AtomicInteger failedRequests = new AtomicInteger(); + AtomicInteger totalSinks = new AtomicInteger(); + AtomicInteger failedSinks = new AtomicInteger(); + AtomicInteger completedSinks = new AtomicInteger(); + Supplier exchangeSink = () -> { + var sinkHandler = new ExchangeSinkHandler(blockFactory, randomExchangeBuffer(), threadPool.relativeTimeInMillisSupplier()); + int failAfter = randomBoolean() ? 
Integer.MAX_VALUE : randomIntBetween(0, 100); + AtomicInteger fetched = new AtomicInteger(); + int instance = randomIntBetween(1, 3); + totalSinks.incrementAndGet(); + AtomicBoolean sinkFailed = new AtomicBoolean(); + exchangeSourceHandler.addRemoteSink((allSourcesFinished, listener) -> { + if (fetched.incrementAndGet() > failAfter) { + sinkHandler.fetchPageAsync(true, listener.delegateFailure((l, r) -> { + failedRequests.incrementAndGet(); + sinkFailed.set(true); + listener.onFailure(new CircuitBreakingException("simulated", CircuitBreaker.Durability.PERMANENT)); + })); + } else { + sinkHandler.fetchPageAsync(allSourcesFinished, listener.delegateFailure((l, r) -> { + Page page = r.takePage(); + if (page != null) { + IntBlock block = page.getBlock(0); + for (int i = 0; i < block.getPositionCount(); i++) { + int v = block.getInt(i); + if (v < maxOutputSeqNo) { + expectedSeqNos.add(v); + } + } + } + l.onResponse(new ExchangeResponse(blockFactory, page, r.finished())); + })); + } + }, false, instance, ActionListener.wrap(r -> { + assertFalse(sinkFailed.get()); + completedSinks.incrementAndGet(); + }, e -> { + assertTrue(sinkFailed.get()); + failedSinks.incrementAndGet(); + })); + return sinkHandler.createExchangeSink(); + }; + Set actualSeqNos = runConcurrentTest( + maxInputSeqNo, + maxOutputSeqNo, + exchangeSourceHandler::createExchangeSource, + exchangeSink + ); + assertThat(actualSeqNos, equalTo(expectedSeqNos)); + assertThat(completedSinks.get() + failedSinks.get(), equalTo(totalSinks.get())); + sourceCompletionFuture.actionGet(); + if (failedRequests.get() > 0) { + assertThat(failedSinks.get(), greaterThan(0)); + } else { + assertThat(failedSinks.get(), equalTo(0)); + } + } + public void testEarlyTerminate() { BlockFactory blockFactory = blockFactory(); IntBlock block1 = blockFactory.newConstantIntBlockWith(1, 2); @@ -378,15 +453,31 @@ public void testConcurrentWithTransportActions() { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); - sourceHandler.addCompletionListener(sourceCompletionFuture); + var sourceHandler = new ExchangeSourceHandler( + randomExchangeBuffer(), + threadPool.executor(ESQL_TEST_EXECUTOR), + sourceCompletionFuture + ); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); - sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); + sourceHandler.addRemoteSink( + exchange0.newRemoteSink(task, exchangeId, node0, connection), + randomBoolean(), + randomIntBetween(1, 5), + ActionListener.noop() + ); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000);
-            runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink);
+            Set<Integer> actualSeqNos = runConcurrentTest(
+                maxInputSeqNo,
+                maxOutputSeqNo,
+                sourceHandler::createExchangeSource,
+                sinkHandler::createExchangeSink
+            );
+            var expectedSeqNos = IntStream.range(0, Math.min(maxInputSeqNo, maxOutputSeqNo)).boxed().collect(Collectors.toSet());
+            assertThat(actualSeqNos, hasSize(expectedSeqNos.size()));
+            assertThat(actualSeqNos, equalTo(expectedSeqNos));
             sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS);
         }
     }
 
@@ -437,12 +528,20 @@ public void sendResponse(TransportResponse transportResponse) {
         try (exchange0; exchange1; node0; node1) {
             String exchangeId = "exchange";
             Task task = new Task(1, "", "", "", null, Collections.emptyMap());
-            var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR));
             PlainActionFuture<Void> sourceCompletionFuture = new PlainActionFuture<>();
-            sourceHandler.addCompletionListener(sourceCompletionFuture);
+            var sourceHandler = new ExchangeSourceHandler(
+                randomIntBetween(1, 128),
+                threadPool.executor(ESQL_TEST_EXECUTOR),
+                sourceCompletionFuture
+            );
             ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128));
             Transport.Connection connection = node0.getConnection(node1.getLocalNode());
-            sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5));
+            sourceHandler.addRemoteSink(
+                exchange0.newRemoteSink(task, exchangeId, node0, connection),
+                true,
+                randomIntBetween(1, 5),
+                ActionListener.noop()
+            );
             Exception err = expectThrows(
                 Exception.class,
                 () -> runConcurrentTest(maxSeqNo, maxSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink)
             );
             Throwable cause = ExceptionsHelper.unwrap(err, IOException.class);
             assertNotNull(cause);
             assertThat(cause.getMessage(), equalTo("page is too large"));
             sinkHandler.onFailure(new RuntimeException(cause));
-            sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS);
+            expectThrows(Exception.class, () -> sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS));
         }
     }
 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index eeed811674f60..e40af28fcdcbd 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -196,10 +196,6 @@ public void execute(
             .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planOriginalIndices(physicalPlan));
         var localOriginalIndices = clusterToOriginalIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
         var localConcreteIndices = clusterToConcreteIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
-        final var exchangeSource = new ExchangeSourceHandler(
-            queryPragmas.exchangeBufferSize(),
-            transportService.getThreadPool().executor(ThreadPool.Names.SEARCH)
-        );
         String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY;
         /*
          * Grab the output attributes here, so we can pass them to
          *
          */
         List<Attribute> outputAttributes = physicalPlan.output();
         try (
-            Releasable ignored = exchangeSource.addEmptySink();
             // this is the top level ComputeListener called once at the end (e.g., once all clusters have finished for a CCS)
             var 
computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> { execInfo.markEndQuery(); // TODO: revisit this time recording model as part of INLINESTATS improvements return new Result(outputAttributes, collectedPages, r.getProfiles(), execInfo); })) ) { - // run compute on the coordinator - exchangeSource.addCompletionListener(computeListener.acquireAvoid()); - runCompute( - rootTask, - new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), - coordinatorPlan, - computeListener.acquireCompute(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) + var exchangeSource = new ExchangeSourceHandler( + queryPragmas.exchangeBufferSize(), + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), + computeListener.acquireAvoid() ); - // starts computes on data nodes on the main cluster - if (localConcreteIndices != null && localConcreteIndices.indices().length > 0) { - startComputeOnDataNodes( + try (Releasable ignored = exchangeSource.addEmptySink()) { + // run compute on the coordinator + runCompute( + rootTask, + new ComputeContext( + sessionId, + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + List.of(), + configuration, + exchangeSource, + null + ), + coordinatorPlan, + computeListener.acquireCompute(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY) + ); + // starts computes on data nodes on the main cluster + if (localConcreteIndices != null && localConcreteIndices.indices().length > 0) { + startComputeOnDataNodes( + sessionId, + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + rootTask, + configuration, + dataNodePlan, + Set.of(localConcreteIndices.indices()), + localOriginalIndices, + exchangeSource, + execInfo, + computeListener + ); + } + // starts computes on remote clusters + startComputeOnRemoteClusters( sessionId, - RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, rootTask, configuration, dataNodePlan, - Set.of(localConcreteIndices.indices()), - localOriginalIndices, exchangeSource, - execInfo, + getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices), computeListener ); } - // starts computes on remote clusters - startComputeOnRemoteClusters( - sessionId, - rootTask, - configuration, - dataNodePlan, - exchangeSource, - getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices), - computeListener - ); } } @@ -341,7 +349,7 @@ private void startComputeOnDataNodes( esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); - exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); ActionListener computeResponseListener = computeListener.acquireCompute(clusterAlias); var dataNodeListener = ActionListener.runBefore(computeResponseListener, () -> l.onResponse(null)); transportService.sendChildRequest( @@ -390,7 +398,7 @@ private void startComputeOnRemoteClusters( esqlExecutor, refs.acquire().delegateFailureAndWrap((l, unused) -> { var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); - exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, 
cluster.originalIndices); var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); var clusterListener = ActionListener.runBefore( @@ -733,9 +741,8 @@ private void runComputeOnDataNode( // run the node-level reduction var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); - var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); - exchangeSource.addCompletionListener(computeListener.acquireAvoid()); - exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); + var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); + exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, 1, ActionListener.noop()); ActionListener reductionListener = computeListener.acquireCompute(); runCompute( task, @@ -872,11 +879,11 @@ void runComputeOnRemoteCluster( final String localSessionId = clusterAlias + ":" + globalSessionId; var exchangeSource = new ExchangeSourceHandler( configuration.pragmas().exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), + computeListener.acquireAvoid() ); try (Releasable ignored = exchangeSource.addEmptySink()) { exchangeSink.addCompletionListener(computeListener.acquireAvoid()); - exchangeSource.addCompletionListener(computeListener.acquireAvoid()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 010a60ef7da15..c745801bf505f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -539,7 +539,7 @@ void executeSubPlan( bigArrays, ByteSizeValue.ofBytes(randomLongBetween(1, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE.getBytes() * 2)) ); - ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor); + ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), executor, ActionListener.noop()); ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(blockFactory, between(1, 64), threadPool::relativeTimeInMillis); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( @@ -569,7 +569,14 @@ void executeSubPlan( var physicalTestOptimizer = new TestLocalPhysicalPlanOptimizer(new LocalPhysicalOptimizerContext(configuration, searchStats)); var csvDataNodePhysicalPlan = PlannerUtils.localPlan(dataNodePlan, logicalTestOptimizer, physicalTestOptimizer); - exchangeSource.addRemoteSink(exchangeSink::fetchPageAsync, randomIntBetween(1, 3)); + exchangeSource.addRemoteSink( + exchangeSink::fetchPageAsync, + Randomness.get().nextBoolean(), + randomIntBetween(1, 3), + ActionListener.noop().delegateResponse((l, e) -> { + throw new AssertionError("expected no failure", e); + }) + ); LocalExecutionPlan dataNodeExecutionPlan = executionPlanner.plan(csvDataNodePhysicalPlan); drivers.addAll(dataNodeExecutionPlan.createDrivers(getTestName())); From 5d9ad6795bd8876031cc3e270f337b23c7a147bf Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Nov 2024 22:36:18 -0800 Subject: [PATCH 242/386] Move node-level reduction plan to data node (#117422) This change moves 
the logic for extracting the node-level plan to the data node instead of the
coordinator. There are several benefits to doing this on the data node
instead:

1. Minimize serialization, especially inter-cluster communications.

2. Pave the way for resolving the row size estimation issue when generating
this plan on data nodes; this will be addressed in a follow-up.

3. Allow each cluster to decide whether to run node-level reduction based on
its own topology.
---
 .../org/elasticsearch/TransportVersions.java  |  2 +-
 .../rules/physical/ProjectAwayColumns.java    |  3 +-
 .../esql/plan/physical/FragmentExec.java      | 56 +++++++++----------
 .../xpack/esql/plugin/ComputeService.java     | 29 +++++-----
 .../ExchangeSinkExecSerializationTests.java   | 12 ++--
 .../FragmentExecSerializationTests.java       |  9 +--
 .../xpack/esql/planner/FilterTests.java       |  2 +-
 7 files changed, 56 insertions(+), 57 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 688d2aaf905a6..6567f48d6c232 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -206,7 +206,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0);
     public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0);
     public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0);
-
+    public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0);
     /*
      * STOP! READ THIS FIRST! No, really,
      *                ____ _____ ___  ____  _        ____  _____    _    ____    _____ _   _ ___ ____    _____ ___ ____  ____ _____ _
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java
index 9f5b35e1eb9fb..d73aaee655860 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/ProjectAwayColumns.java
@@ -73,8 +73,7 @@ public PhysicalPlan apply(PhysicalPlan plan) {
                     Source.EMPTY,
                     new Project(logicalFragment.source(), logicalFragment, output),
                     fragmentExec.esFilter(),
-                    fragmentExec.estimatedRowSize(),
-                    fragmentExec.reducer()
+                    fragmentExec.estimatedRowSize()
                 );
                 return new ExchangeExec(exec.source(), output, exec.inBetweenAggs(), newChild);
             }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java
index 5b1ee14642dbe..444c111539033 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java
@@ -31,7 +31,6 @@ public class FragmentExec extends LeafExec implements EstimatesRowSize {
     private final LogicalPlan fragment;
     private final QueryBuilder esFilter;
-    private final PhysicalPlan reducer; // datanode-level physical plan node that performs an intermediate (not partial) reduce
 
     /**
      * Estimate of the number of bytes that'll be loaded per position before
      * the stream of pages is consumed.
     */
     private final int estimatedRowSize;
 
     public 
FragmentExec(LogicalPlan fragment) { - this(fragment.source(), fragment, null, 0, null); + this(fragment.source(), fragment, null, 0); } - public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize, PhysicalPlan reducer) { + public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize) { super(source); this.fragment = fragment; this.esFilter = esFilter; this.estimatedRowSize = estimatedRowSize; - this.reducer = reducer; } private FragmentExec(StreamInput in) throws IOException { - this( - Source.readFrom((PlanStreamInput) in), - in.readNamedWriteable(LogicalPlan.class), - in.readOptionalNamedWriteable(QueryBuilder.class), - in.readOptionalVInt(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) ? in.readOptionalNamedWriteable(PhysicalPlan.class) : null - ); + super(Source.readFrom((PlanStreamInput) in)); + this.fragment = in.readNamedWriteable(LogicalPlan.class); + this.esFilter = in.readOptionalNamedWriteable(QueryBuilder.class); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_REMOVE_NODE_LEVEL_PLAN)) { + this.estimatedRowSize = in.readVInt(); + } else { + this.estimatedRowSize = Objects.requireNonNull(in.readOptionalVInt()); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { + in.readOptionalNamedWriteable(PhysicalPlan.class); // for old reducer + } + } } @Override @@ -66,9 +68,13 @@ public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeNamedWriteable(fragment()); out.writeOptionalNamedWriteable(esFilter()); - out.writeOptionalVInt(estimatedRowSize()); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { - out.writeOptionalNamedWriteable(reducer); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_REMOVE_NODE_LEVEL_PLAN)) { + out.writeVInt(estimatedRowSize); + } else { + out.writeOptionalVInt(estimatedRowSize()); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) { + out.writeOptionalNamedWriteable(null);// for old reducer + } } } @@ -89,13 +95,9 @@ public Integer estimatedRowSize() { return estimatedRowSize; } - public PhysicalPlan reducer() { - return reducer; - } - @Override protected NodeInfo info() { - return NodeInfo.create(this, FragmentExec::new, fragment, esFilter, estimatedRowSize, reducer); + return NodeInfo.create(this, FragmentExec::new, fragment, esFilter, estimatedRowSize); } @Override @@ -108,24 +110,20 @@ public PhysicalPlan estimateRowSize(State state) { int estimatedRowSize = state.consumeAllFields(false); return Objects.equals(estimatedRowSize, this.estimatedRowSize) ? this - : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); + : new FragmentExec(source(), fragment, esFilter, estimatedRowSize); } public FragmentExec withFragment(LogicalPlan fragment) { - return Objects.equals(fragment, this.fragment) ? this : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); + return Objects.equals(fragment, this.fragment) ? this : new FragmentExec(source(), fragment, esFilter, estimatedRowSize); } public FragmentExec withFilter(QueryBuilder filter) { - return Objects.equals(filter, this.esFilter) ? this : new FragmentExec(source(), fragment, filter, estimatedRowSize, reducer); - } - - public FragmentExec withReducer(PhysicalPlan reducer) { - return Objects.equals(reducer, this.reducer) ? 
this : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); + return Objects.equals(filter, this.esFilter) ? this : new FragmentExec(source(), fragment, filter, estimatedRowSize); } @Override public int hashCode() { - return Objects.hash(fragment, esFilter, estimatedRowSize, reducer); + return Objects.hash(fragment, esFilter, estimatedRowSize); } @Override @@ -141,8 +139,7 @@ public boolean equals(Object obj) { FragmentExec other = (FragmentExec) obj; return Objects.equals(fragment, other.fragment) && Objects.equals(esFilter, other.esFilter) - && Objects.equals(estimatedRowSize, other.estimatedRowSize) - && Objects.equals(reducer, other.reducer); + && Objects.equals(estimatedRowSize, other.estimatedRowSize); } @Override @@ -154,7 +151,6 @@ public String nodeString() { sb.append(", estimatedRowSize="); sb.append(estimatedRowSize); sb.append(", reducer=["); - sb.append(reducer == null ? "" : reducer.toString()); sb.append("], fragment=[<>\n"); sb.append(fragment.toString()); sb.append("<>]]"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index e40af28fcdcbd..6a0d1bf9bb035 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -60,6 +60,7 @@ import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; @@ -314,14 +315,7 @@ private void startComputeOnDataNodes( EsqlExecutionInfo executionInfo, ComputeListener computeListener ) { - var planWithReducer = configuration.pragmas().nodeLevelReduction() == false - ? dataNodePlan - : dataNodePlan.transformUp(FragmentExec.class, f -> { - PhysicalPlan reductionNode = PlannerUtils.dataNodeReductionPlan(f.fragment(), dataNodePlan); - return reductionNode == null ? 
f : f.withReducer(reductionNode); - }); - - QueryBuilder requestFilter = PlannerUtils.requestTimestampFilter(planWithReducer); + QueryBuilder requestFilter = PlannerUtils.requestTimestampFilter(dataNodePlan); var lookupListener = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); // SearchShards API can_match is done in lookupDataNodes lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodeResult -> { @@ -361,7 +355,7 @@ private void startComputeOnDataNodes( clusterAlias, node.shardIds, node.aliasFilters, - planWithReducer, + dataNodePlan, originalIndices.indices(), originalIndices.indicesOptions() ), @@ -450,12 +444,12 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ); LOGGER.debug("Received physical plan:\n{}", plan); + plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration, plan); // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(plan); - if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } @@ -785,14 +779,23 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T listener.onFailure(new IllegalStateException("expected a fragment plan for a remote compute; got " + request.plan())); return; } - var localExchangeSource = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); - FragmentExec fragment = (FragmentExec) fragments.get(0); + Holder reducePlanHolder = new Holder<>(); + if (request.pragmas().nodeLevelReduction()) { + PhysicalPlan dataNodePlan = request.plan(); + request.plan() + .forEachUp( + FragmentExec.class, + f -> { reducePlanHolder.set(PlannerUtils.dataNodeReductionPlan(f.fragment(), dataNodePlan)); } + ); + } reducePlan = new ExchangeSinkExec( plan.source(), plan.output(), plan.isIntermediateAgg(), - fragment.reducer() != null ? fragment.reducer().replaceChildren(List.of(localExchangeSource)) : localExchangeSource + reducePlanHolder.get() != null + ? reducePlanHolder.get().replaceChildren(List.of(localExchangeSource)) + : localExchangeSource ); } else { listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index 5989c0de6b61d..f8e12cd4f5ba9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -66,12 +66,13 @@ protected boolean alwaysEmptySource() { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflicts() throws IOException { - testManyTypeConflicts(false, ByteSizeValue.ofBytes(1424048)); + testManyTypeConflicts(false, ByteSizeValue.ofBytes(1424046L)); /* * History: * 2.3mb - shorten error messages for UnsupportedAttributes #111973 * 1.8mb - cache EsFields #112008 * 1.4mb - string serialization #112929 + * 1424046b - remove node-level plan #117422 */ } @@ -80,7 +81,7 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. */ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774192)); + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774190)); /* * History: * 2 gb+ - start @@ -89,6 +90,7 @@ public void testManyTypeConflictsWithParent() throws IOException { * 3.1mb - cache EsFields #112008 * 2774214b - string serialization #112929 * 2774192b - remove field attribute #112881 + * 2774190b - remove node-level plan #117422 */ } @@ -103,11 +105,12 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t * with a single root field that has many children, grandchildren etc. */ public void testDeeplyNestedFields() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(47252411); + ByteSizeValue expected = ByteSizeValue.ofBytes(47252409); /* * History: * 48223371b - string serialization #112929 * 47252411b - remove field attribute #112881 + * 47252409b - remove node-level plan */ int depth = 6; @@ -123,11 +126,12 @@ public void testDeeplyNestedFields() throws IOException { * with a single root field that has many children, grandchildren etc. */ public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(9425806); + ByteSizeValue expected = ByteSizeValue.ofBytes(9425804); /* * History: * 9426058b - string serialization #112929 * 9425806b - remove field attribute #112881 + * 9425804b - remove node-level plan #117422 */ int depth = 6; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExecSerializationTests.java index 3c70290360a56..b36c42a1a06ab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExecSerializationTests.java @@ -22,8 +22,7 @@ public static FragmentExec randomFragmentExec(int depth) { LogicalPlan fragment = AbstractLogicalPlanSerializationTests.randomChild(depth); QueryBuilder esFilter = EsqlQueryRequestTests.randomQueryBuilder(); int estimatedRowSize = between(0, Integer.MAX_VALUE); - PhysicalPlan reducer = randomChild(depth); - return new FragmentExec(source, fragment, esFilter, estimatedRowSize, reducer); + return new FragmentExec(source, fragment, esFilter, estimatedRowSize); } @Override @@ -36,15 +35,13 @@ protected FragmentExec mutateInstance(FragmentExec instance) throws IOException LogicalPlan fragment = instance.fragment(); QueryBuilder esFilter = instance.esFilter(); int estimatedRowSize = instance.estimatedRowSize(); - PhysicalPlan reducer = instance.reducer(); - switch (between(0, 3)) { + switch (between(0, 2)) { case 0 -> fragment = randomValueOtherThan(fragment, () -> AbstractLogicalPlanSerializationTests.randomChild(0)); case 1 -> esFilter = randomValueOtherThan(esFilter, 
EsqlQueryRequestTests::randomQueryBuilder);
             case 2 -> estimatedRowSize = randomValueOtherThan(estimatedRowSize, () -> between(0, Integer.MAX_VALUE));
-            case 3 -> reducer = randomValueOtherThan(reducer, () -> randomChild(0));
             default -> throw new UnsupportedEncodingException();
         }
-        return new FragmentExec(instance.source(), fragment, esFilter, estimatedRowSize, reducer);
+        return new FragmentExec(instance.source(), fragment, esFilter, estimatedRowSize);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
index 8d819f9dbcd6c..55f32d07fc2cb 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java
@@ -305,7 +305,7 @@ private PhysicalPlan plan(String query, QueryBuilder restFilter) {
         // System.out.println("physical\n" + physical);
         physical = physical.transformUp(
             FragmentExec.class,
-            f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize(), f.reducer())
+            f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize())
         );
         physical = physicalPlanOptimizer.optimize(physical);
         // System.out.println("optimized\n" + physical);

From 78400b8d05c08a3e443e926648ab98102c9e32a7 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Tue, 26 Nov 2024 07:53:40 +0100
Subject: [PATCH 243/386] InternalMultiBucketAggregation.InternalBucket does
 not implement writable anymore (#117310)

This allows making some Bucket implementations leaner, in particular the
terms and multi-terms aggregations.
---
 .../adjacency/InternalAdjacencyMatrix.java    |  2 +-
 .../bucket/timeseries/InternalTimeSeries.java |  2 +-
 .../InternalMultiBucketAggregation.java       |  6 ++-
 .../bucket/composite/InternalComposite.java   |  4 +-
 .../bucket/filter/InternalFilters.java        |  2 +-
 .../bucket/geogrid/InternalGeoGridBucket.java |  2 +-
 .../histogram/AbstractHistogramBucket.java    |  2 +-
 .../bucket/prefix/InternalIpPrefix.java       |  2 +-
 .../bucket/range/InternalBinaryRange.java     |  2 +-
 .../bucket/range/InternalRange.java           |  2 +-
 .../bucket/terms/AbstractInternalTerms.java   | 48 ++++++++++---------
 .../bucket/terms/DoubleTerms.java             |  6 +--
 .../GlobalOrdinalsStringTermsAggregator.java  |  1 -
 .../bucket/terms/InternalMappedTerms.java     | 10 +++-
 .../bucket/terms/InternalRareTerms.java       |  6 ++-
 .../terms/InternalSignificantTerms.java       |  2 +-
 .../bucket/terms/InternalTerms.java           | 37 +++-----------
 .../aggregations/bucket/terms/LongTerms.java  |  6 +--
 .../bucket/terms/StringTerms.java             |  6 +--
 .../bucket/terms/UnmappedTerms.java           |  5 ++
 .../pipeline/BucketHelpersTests.java          |  9 ----
 .../multiterms/InternalMultiTerms.java        | 39 +++++++--------
 .../InternalCategorizationAggregation.java    |  2 +-
 .../aggs/changepoint/ChangePointBucket.java   |  2 +-
 24 files changed, 94 insertions(+), 111 deletions(-)

diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
index 824f009bc7d8e..999f790ee8117 100644
--- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
+++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java
@@ -33,7 +33,7 @@ public class InternalAdjacencyMatrix extends 
InternalMultiBucketAggregation implements AdjacencyMatrix { - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements AdjacencyMatrix.Bucket { + public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucketWritable implements AdjacencyMatrix.Bucket { private final String key; private final long docCount; diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index d7590f2126325..c4669b1c25224 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -34,7 +34,7 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation reducePipelineBuckets(AggregationReduceContext reduceContext, Pi return reducedBuckets; } - public abstract static class InternalBucket implements Bucket, Writeable { + public abstract static class InternalBucket implements Bucket { public Object getProperty(String containingAggName, List path) { if (path.isEmpty()) { @@ -248,4 +248,8 @@ public Object getProperty(String containingAggName, List path) { return aggregation.getProperty(path.subList(1, path.size())); } } + + /** A {@link InternalBucket} that implements the {@link Writeable} interface. Most implementation might want + * to use this one except when specific logic is need to write into the stream. */ + public abstract static class InternalBucketWritable extends InternalBucket implements Writeable {} } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index faa953e77edd8..1492e97e6a5a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -320,7 +320,9 @@ public int hashCode() { return Objects.hash(super.hashCode(), size, buckets, afterKey, Arrays.hashCode(reverseMuls), Arrays.hashCode(missingOrders)); } - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements CompositeAggregation.Bucket { + public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucketWritable + implements + CompositeAggregation.Bucket { private final CompositeKey key; private final long docCount; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index c05759582346a..19cd0df9c7122 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -30,7 +30,7 @@ import java.util.Objects; public class InternalFilters extends InternalMultiBucketAggregation implements Filters { - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Filters.Bucket { + public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucketWritable implements Filters.Bucket { private final 
String key; private long docCount; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index 9e3c96da2e70b..60de4c3974c92 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.Objects; -public abstract class InternalGeoGridBucket extends InternalMultiBucketAggregation.InternalBucket +public abstract class InternalGeoGridBucket extends InternalMultiBucketAggregation.InternalBucketWritable implements GeoGrid.Bucket, Comparable { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java index 16a83ed04e524..7806d8cd8efe2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java @@ -16,7 +16,7 @@ /** * A bucket in the histogram where documents fall in */ -public abstract class AbstractHistogramBucket extends InternalMultiBucketAggregation.InternalBucket { +public abstract class AbstractHistogramBucket extends InternalMultiBucketAggregation.InternalBucketWritable { protected final long docCount; protected final InternalAggregations aggregations; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index 5b456b3246b64..36a8fccc77e99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -33,7 +33,7 @@ public class InternalIpPrefix extends InternalMultiBucketAggregation { - public static class Bucket extends InternalMultiBucketAggregation.InternalBucket + public static class Bucket extends InternalMultiBucketAggregation.InternalBucketWritable implements IpPrefix.Bucket, KeyComparable { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 9571dfebc6069..34a2ebea88440 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -36,7 +36,7 @@ public final class InternalBinaryRange extends InternalMultiBucketAggregation buckets, AggregationReduceContext context) { @@ -104,7 +104,7 @@ private B reduceBucket(List buckets, AggregationReduceContext context) { for (B bucket : buckets) { docCount += bucket.getDocCount(); if (docCountError != -1) { - if (bucket.getShowDocCountError() == false || bucket.getDocCountError() == -1) { + if (getShowDocCountError() == false || bucket.getDocCountError() == -1) { docCountError = -1; } else { docCountError += bucket.getDocCountError(); @@ -257,6 +257,7 @@ public void accept(InternalAggregation aggregation) { } otherDocCount[0] += 
terms.getSumOfOtherDocCounts(); final long thisAggDocCountError = getDocCountError(terms); + setDocCountError(thisAggDocCountError); if (sumDocCountError != -1) { if (thisAggDocCountError == -1) { sumDocCountError = -1; @@ -264,16 +265,17 @@ public void accept(InternalAggregation aggregation) { sumDocCountError += thisAggDocCountError; } } - setDocCountError(thisAggDocCountError); - for (B bucket : terms.getBuckets()) { - // If there is already a doc count error for this bucket - // subtract this aggs doc count error from it to make the - // new value for the bucket. This then means that when the - // final error for the bucket is calculated below we account - // for the existing error calculated in a previous reduce. - // Note that if the error is unbounded (-1) this will be fixed - // later in this method. - bucket.updateDocCountError(-thisAggDocCountError); + if (getShowDocCountError()) { + for (B bucket : terms.getBuckets()) { + // If there is already a doc count error for this bucket + // subtract this aggs doc count error from it to make the + // new value for the bucket. This then means that when the + // final error for the bucket is calculated below we account + // for the existing error calculated in a previous reduce. + // Note that if the error is unbounded (-1) this will be fixed + // later in this method. + bucket.updateDocCountError(-thisAggDocCountError); + } } if (terms.getBuckets().isEmpty() == false) { bucketsList.add(terms.getBuckets()); @@ -319,17 +321,17 @@ public InternalAggregation get() { result.add(bucket.reduced(AbstractInternalTerms.this::reduceBucket, reduceContext)); }); } - for (B r : result) { - if (sumDocCountError == -1) { - r.setDocCountError(-1); - } else { - r.updateDocCountError(sumDocCountError); + if (getShowDocCountError()) { + for (B r : result) { + if (sumDocCountError == -1) { + r.setDocCountError(-1); + } else { + r.updateDocCountError(sumDocCountError); + } } } - long docCountError; - if (sumDocCountError == -1) { - docCountError = -1; - } else { + long docCountError = -1; + if (sumDocCountError != -1) { docCountError = size == 1 ? 0 : sumDocCountError; } return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); @@ -349,7 +351,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { b -> createBucket( samplingContext.scaleUp(b.getDocCount()), InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext), - b.getShowDocCountError() ? samplingContext.scaleUp(b.getDocCountError()) : 0, + getShowDocCountError() ? 
                 samplingContext.scaleUp(b.getDocCountError()) : 0,
             b
         )
     )
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
index 9789a9edc58f7..5c28c25de6e87 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTerms.java
@@ -164,8 +164,8 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
             prototype.term,
             prototype.docCount,
             aggregations,
-            prototype.showDocCountError,
-            prototype.docCountError,
+            showTermDocCountError,
+            prototype.getDocCountError(),
             prototype.format
         );
     }
@@ -216,6 +216,6 @@ public void close() {
 
     @Override
     protected Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, DoubleTerms.Bucket prototype) {
-        return new Bucket(prototype.term, docCount, aggs, prototype.showDocCountError, docCountError, format);
+        return new Bucket(prototype.term, docCount, aggs, showTermDocCountError, docCountError, format);
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
index db9da6ed67207..5a79155d1d4f5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
@@ -880,7 +880,6 @@ StringTerms.Bucket convertTempBucketToRealBucket(OrdBucket temp, GlobalOrdLookup
         BytesRef term = BytesRef.deepCopyOf(lookupGlobalOrd.apply(temp.globalOrd));
         StringTerms.Bucket result = new StringTerms.Bucket(term, temp.docCount, null, showTermDocCountError, 0, format);
         result.bucketOrd = temp.bucketOrd;
-        result.docCountError = 0;
         return result;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java
index 5b9403840dfff..d7087a121b4f4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java
@@ -87,7 +87,10 @@ protected final void writeTermTypeInfoTo(StreamOutput out) throws IOException {
         writeSize(shardSize, out);
         out.writeBoolean(showTermDocCountError);
         out.writeVLong(otherDocCount);
-        out.writeCollection(buckets);
+        out.writeVInt(buckets.size());
+        for (var bucket : buckets) {
+            bucket.writeTo(out, showTermDocCountError);
+        }
     }
 
     @Override
@@ -95,6 +98,11 @@ protected void setDocCountError(long docCountError) {
         this.docCountError = docCountError;
     }
 
+    @Override
+    protected boolean getShowDocCountError() {
+        return showTermDocCountError;
+    }
+
     @Override
     protected int getShardSize() {
         return shardSize;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
index 64cebee880141..7859319f4dd0d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java
@@ -10,6 +10,7 @@
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.util.SetBackedScalingCuckooFilter;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.BucketOrder;
@@ -29,10 +30,11 @@ public abstract class InternalRareTerms<A extends InternalRareTerms<A, B>, B ext
     implements RareTerms {
 
-    public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucket
+    public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucketWritable
         implements
             RareTerms.Bucket,
-            KeyComparable<B> {
+            KeyComparable<B>,
+            Writeable {
         /**
          * Reads a bucket. Should be a constructor reference.
         */
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
index 3f579947248bb..6c0eb465d1f80 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java
@@ -45,7 +45,7 @@ public abstract class InternalSignificantTerms<A extends InternalSignificantTerms<A, B>, B ext
 
-    public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucket
+    public abstract static class Bucket<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucketWritable
         implements
             SignificantTerms.Bucket {
 
         /**
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
index b94b1f5ea40b1..739f0b923eaab 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java
@@ -41,9 +41,8 @@ public interface Reader<B extends Bucket<B>> {
 
         long bucketOrd;
 
         protected long docCount;
-        protected long docCountError;
+        private long docCountError;
         protected InternalAggregations aggregations;
-        protected final boolean showDocCountError;
         protected final DocValueFormat format;
 
         protected Bucket(
@@ -53,29 +52,23 @@ protected Bucket(
             long docCountError,
             DocValueFormat formatter
         ) {
-            this.showDocCountError = showDocCountError;
             this.format = formatter;
             this.docCount = docCount;
             this.aggregations = aggregations;
-            this.docCountError = docCountError;
+            this.docCountError = showDocCountError ? docCountError : -1;
         }
 
         /**
          * Read from a stream.
          */
         protected Bucket(StreamInput in, DocValueFormat formatter, boolean showDocCountError) throws IOException {
-            this.showDocCountError = showDocCountError;
             this.format = formatter;
             docCount = in.readVLong();
-            docCountError = -1;
-            if (showDocCountError) {
-                docCountError = in.readLong();
-            }
+            docCountError = showDocCountError ? in.readLong() : -1;
             aggregations = InternalAggregations.readFrom(in);
         }
 
-        @Override
-        public final void writeTo(StreamOutput out) throws IOException {
+        final void writeTo(StreamOutput out, boolean showDocCountError) throws IOException {
             out.writeVLong(getDocCount());
             if (showDocCountError) {
                 out.writeLong(docCountError);
@@ -105,9 +98,6 @@ public void setBucketOrd(long bucketOrd) {
 
         @Override
         public long getDocCountError() {
-            if (showDocCountError == false) {
-                throw new IllegalStateException("show_terms_doc_count_error is false");
-            }
             return docCountError;
         }
 
@@ -121,11 +111,6 @@ protected void updateDocCountError(long docCountErrorDiff) {
             this.docCountError += docCountErrorDiff;
         }
 
-        @Override
-        protected boolean getShowDocCountError() {
-            return showDocCountError;
-        }
-
         @Override
         public InternalAggregations getAggregations() {
             return aggregations;
@@ -155,23 +140,15 @@ public boolean equals(Object obj) {
                 return false;
             }
             Bucket<?> that = (Bucket<?>) obj;
-            if (showDocCountError && docCountError != that.docCountError) {
-                /*
-                 * docCountError doesn't matter if not showing it and
-                 * serialization sets it to -1 no matter what it was
-                 * before.
-                 */
-                return false;
-            }
-            return Objects.equals(docCount, that.docCount)
-                && Objects.equals(showDocCountError, that.showDocCountError)
+            return Objects.equals(docCountError, that.docCountError)
+                && Objects.equals(docCount, that.docCount)
                 && Objects.equals(format, that.format)
                 && Objects.equals(aggregations, that.aggregations);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(getClass(), docCount, format, showDocCountError, showDocCountError ? docCountError : -1, aggregations);
+            return Objects.hash(getClass(), docCount, format, docCountError, aggregations);
         }
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
index f536b7f958ca2..6c2444379c8eb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTerms.java
@@ -178,8 +178,8 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
             prototype.term,
             prototype.docCount,
             aggregations,
-            prototype.showDocCountError,
-            prototype.docCountError,
+            showTermDocCountError,
+            prototype.getDocCountError(),
             prototype.format
         );
     }
@@ -260,7 +260,7 @@ public InternalAggregation get() {
 
     @Override
     protected Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, LongTerms.Bucket prototype) {
-        return new Bucket(prototype.term, docCount, aggs, prototype.showDocCountError, docCountError, format);
+        return new Bucket(prototype.term, docCount, aggs, showTermDocCountError, docCountError, format);
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
index 5faf6e0aaaedf..2370827230c47 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTerms.java
@@ -184,15 +184,15 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype)
             prototype.termBytes,
             prototype.docCount,
             aggregations,
-            prototype.showDocCountError,
-            prototype.docCountError,
+            showTermDocCountError,
+            prototype.getDocCountError(),
             prototype.format
         );
     }
 
     @Override
     protected Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, StringTerms.Bucket prototype) {
-        return new Bucket(prototype.termBytes, docCount, aggs, prototype.showDocCountError, docCountError, format);
+        return new Bucket(prototype.termBytes, docCount, aggs, showTermDocCountError, docCountError, format);
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
index 8047d1f06990f..e82a2b7fe9235 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
@@ -114,6 +114,11 @@ public final XContentBuilder doXContentBody(XContentBuilder builder, Params para
         return doXContentCommon(builder, params, false, 0L, 0, Collections.emptyList());
     }
 
+    @Override
+    protected boolean getShowDocCountError() {
+        return false;
+    }
+
     @Override
     protected void setDocCountError(long docCountError) {}
 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java
index b2f79c02baf8d..626adc9a7c41c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java
@@ -9,7 +9,6 @@
 
 package org.elasticsearch.search.aggregations.pipeline;
 
-import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
@@ -56,10 +55,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         };
 
         InternalMultiBucketAggregation.InternalBucket bucket = new InternalMultiBucketAggregation.InternalBucket() {
-            @Override
-            public void writeTo(StreamOutput out) throws IOException {
-
-            }
 
             @Override
             public Object getKey() {
@@ -131,10 +126,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         };
 
         InternalMultiBucketAggregation.InternalBucket bucket = new InternalMultiBucketAggregation.InternalBucket() {
-            @Override
-            public void writeTo(StreamOutput out) throws IOException {
-
-            }
 
             @Override
             public Object getKey() {
diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java
index c6bfb5b1b2778..0d42a2856a10e 100644
--- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java
+++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java
@@ -42,8 +42,7 @@ public static class Bucket extends AbstractInternalTerms.AbstractTermsBucket
+        private long docCountError;
         protected final List<DocValueFormat> formats;
         protected List<Object> terms;
         protected List<KeyConverter> keyConverters;
@@ -60,8 +59,7 @@ public Bucket(
             this.terms = terms;
             this.docCount = docCount;
             this.aggregations = aggregations;
-            this.showDocCountError = showDocCountError;
-            this.docCountError = docCountError;
+            this.docCountError = showDocCountError ? docCountError : -1;
             this.formats = formats;
             this.keyConverters = keyConverters;
         }
@@ -71,7 +69,6 @@ protected Bucket(StreamInput in, List<DocValueFormat> formats, List
         formats, List {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java
index 39bdb69d4da40..aed0c40043cae 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java
@@ -18,7 +18,7 @@
 import java.io.IOException;
 import java.util.Objects;
 
-public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket implements ToXContent {
+public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucketWritable implements ToXContent {
     private final Object key;
     private final long docCount;
     private final InternalAggregations aggregations;

From ed33bea30cd898936e43e24a7927290409f30b18 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Tue, 26 Nov 2024 08:02:12 +0100
Subject: [PATCH 244/386] Adjust SyntheticSourceLicenseService (#116647)

Allow gold and platinum licenses to use synthetic source for a limited
time. If the start time of a license is before the cutoff date, then
gold and platinum licenses will not fall back to stored source if
synthetic source is used.

Co-authored-by: Nikolaj Volgushev
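In outline, the gate this patch introduces reduces to a start-date comparison combined with the gold-level licensed-feature check. The following self-contained sketch is illustrative only: the names CutoffSketch and keepSyntheticSource are not part of the patch, the real logic lives in SyntheticSourceLicenseService.fallbackToStoredSource in the diff below, and the cutoff value mirrors DEFAULT_CUTOFF_DATE.

    import java.time.LocalDateTime;
    import java.time.ZoneOffset;

    // Hypothetical sketch of the gating rule introduced by this patch.
    class CutoffSketch {
        // Mirrors DEFAULT_CUTOFF_DATE in the diff below: 2024-12-12T00:00 UTC.
        static final long CUTOFF_MILLIS = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();

        /**
         * @param licenseStartMillis           license start date, in epoch milliseconds
         * @param legacyUsageAllowed           true only for the GA-ed use cases (time series, profiling, APM metrics)
         * @param goldOrPlatinumFeatureAllowed result of checking the gold-level "synthetic-source-legacy" feature
         * @return whether synthetic source may be kept instead of falling back to stored source
         */
        static boolean keepSyntheticSource(long licenseStartMillis, boolean legacyUsageAllowed, boolean goldOrPlatinumFeatureAllowed) {
            // A gold/platinum license keeps synthetic source only while its start
            // date is on or before the cutoff and the usage is an approved one.
            return legacyUsageAllowed && licenseStartMillis <= CUTOFF_MILLIS && goldOrPlatinumFeatureAllowed;
        }
    }

An enterprise license short-circuits before any of this: the regular "synthetic-source" feature check passes and the legacy path is never consulted.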
---
 .../xpack/logsdb/LogsDBPlugin.java            |  13 +-
 .../SyntheticSourceIndexSettingsProvider.java |  32 +++-
 .../logsdb/SyntheticSourceLicenseService.java |  83 ++++++++-
 .../logsdb/LegacyLicenceIntegrationTests.java | 146 +++++++++++++++
 ...dexSettingsProviderLegacyLicenseTests.java | 129 +++++++++++++
 ...heticSourceIndexSettingsProviderTests.java |  13 +-
 .../SyntheticSourceLicenseServiceTests.java   | 173 ++++++++++++++++--
 7 files changed, 562 insertions(+), 27 deletions(-)
 create mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java
 create mode 100644 x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java

diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java
index 04d12fd51bae7..904b00e6d0450 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java
@@ -13,6 +13,8 @@
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettingProvider;
+import org.elasticsearch.license.LicenseService;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.xpack.core.XPackPlugin;
@@ -46,7 +48,8 @@ public LogsDBPlugin(Settings settings) {
 
     @Override
     public Collection<?> createComponents(PluginServices services) {
-        licenseService.setLicenseState(XPackPlugin.getSharedLicenseState());
+        licenseService.setLicenseService(getLicenseService());
+        licenseService.setLicenseState(getLicenseState());
         var clusterSettings = services.clusterService().getClusterSettings();
         // The `cluster.logsdb.enabled` setting is registered by this plugin, but its value may be updated by other plugins
         // before this plugin registers its settings update consumer below. This means we might miss updates that occurred earlier.
@@ -88,4 +91,12 @@ public List<Setting<?>> getSettings() {
         actions.add(new ActionPlugin.ActionHandler<>(XPackInfoFeatureAction.LOGSDB, LogsDBInfoTransportAction.class));
         return actions;
     }
+
+    protected XPackLicenseState getLicenseState() {
+        return XPackPlugin.getSharedLicenseState();
+    }
+
+    protected LicenseService getLicenseService() {
+        return XPackPlugin.getSharedLicenseService();
+    }
 }
diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java
index 1f38ecda19515..462bad4b19551 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java
@@ -81,8 +81,13 @@ public Settings getAdditionalIndexSettings(
         // This index name is used when validating component and index templates, we should skip this check in that case.
         // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) method)
         boolean isTemplateValidation = "validate-index-name".equals(indexName);
+        boolean legacyLicensedUsageOfSyntheticSourceAllowed = isLegacyLicensedUsageOfSyntheticSourceAllowed(
+            templateIndexMode,
+            indexName,
+            dataStreamName
+        );
         if (newIndexHasSyntheticSourceUsage(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings, combinedTemplateMappings)
-            && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation)) {
+            && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation, legacyLicensedUsageOfSyntheticSourceAllowed)) {
             LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName);
             return Settings.builder()
                 .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.STORED.toString())
@@ -167,4 +172,29 @@ private IndexMetadata buildIndexMetadataForMapperService(
         tmpIndexMetadata.settings(finalResolvedSettings);
         return tmpIndexMetadata.build();
     }
+
+    /**
+     * The GA-ed use cases in which synthetic source usage is allowed with gold or platinum license.
+     */
+    boolean isLegacyLicensedUsageOfSyntheticSourceAllowed(IndexMode templateIndexMode, String indexName, String dataStreamName) {
+        if (templateIndexMode == IndexMode.TIME_SERIES) {
+            return true;
+        }
+
+        // To allow the following patterns: profiling-metrics and profiling-events
+        if (dataStreamName != null && dataStreamName.startsWith("profiling-")) {
+            return true;
+        }
+        // To allow the following patterns: .profiling-sq-executables, .profiling-sq-leafframes and .profiling-stacktraces
+        if (indexName.startsWith(".profiling-")) {
+            return true;
+        }
+        // To allow the following patterns: metrics-apm.transaction.*, metrics-apm.service_transaction.*, metrics-apm.service_summary.*,
+        // metrics-apm.service_destination.*, "metrics-apm.internal-* and metrics-apm.app.*
+        if (dataStreamName != null && dataStreamName.startsWith("metrics-apm.")) {
+            return true;
+        }
+
+        return false;
+    }
 }
diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
index 55d4bfe05abe3..1b3513f15a86a 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
@@ -7,18 +7,30 @@
 
 package org.elasticsearch.xpack.logsdb;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicenseService;
 import org.elasticsearch.license.LicensedFeature;
 import org.elasticsearch.license.XPackLicenseState;
 
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+
 /**
  * Determines based on license and fallback setting whether synthetic source usages should fallback to stored source.
  */
 final class SyntheticSourceLicenseService {
 
-    private static final String MAPPINGS_FEATURE_FAMILY = "mappings";
+    static final String MAPPINGS_FEATURE_FAMILY = "mappings";
+    // You can only override this property if you received explicit approval from Elastic.
+    private static final String CUTOFF_DATE_SYS_PROP_NAME =
+        "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override";
+    private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceLicenseService.class);
+    static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
 
     /**
      * A setting that determines whether source mode should always be stored source. Regardless of licence.
@@ -30,31 +42,71 @@ final class SyntheticSourceLicenseService {
         Setting.Property.Dynamic
     );
 
-    private static final LicensedFeature.Momentary SYNTHETIC_SOURCE_FEATURE = LicensedFeature.momentary(
+    static final LicensedFeature.Momentary SYNTHETIC_SOURCE_FEATURE = LicensedFeature.momentary(
         MAPPINGS_FEATURE_FAMILY,
         "synthetic-source",
         License.OperationMode.ENTERPRISE
     );
 
+    static final LicensedFeature.Momentary SYNTHETIC_SOURCE_FEATURE_LEGACY = LicensedFeature.momentary(
+        MAPPINGS_FEATURE_FAMILY,
+        "synthetic-source-legacy",
+        License.OperationMode.GOLD
+    );
+
+    private final long cutoffDate;
+    private LicenseService licenseService;
     private XPackLicenseState licenseState;
     private volatile boolean syntheticSourceFallback;
 
     SyntheticSourceLicenseService(Settings settings) {
-        syntheticSourceFallback = FALLBACK_SETTING.get(settings);
+        this(settings, System.getProperty(CUTOFF_DATE_SYS_PROP_NAME));
+    }
+
+    SyntheticSourceLicenseService(Settings settings, String cutoffDate) {
+        this.syntheticSourceFallback = FALLBACK_SETTING.get(settings);
+        this.cutoffDate = getCutoffDate(cutoffDate);
     }
 
     /**
     * @return whether synthetic source mode should fallback to stored source.
     */
-    public boolean fallbackToStoredSource(boolean isTemplateValidation) {
+    public boolean fallbackToStoredSource(boolean isTemplateValidation, boolean legacyLicensedUsageOfSyntheticSourceAllowed) {
         if (syntheticSourceFallback) {
             return true;
         }
 
+        var licenseStateSnapshot = licenseState.copyCurrentLicenseState();
+        if (checkFeature(SYNTHETIC_SOURCE_FEATURE, licenseStateSnapshot, isTemplateValidation)) {
+            return false;
+        }
+
+        var license = licenseService.getLicense();
+        if (license == null) {
+            return true;
+        }
+
+        boolean beforeCutoffDate = license.startDate() <= cutoffDate;
+        if (legacyLicensedUsageOfSyntheticSourceAllowed
+            && beforeCutoffDate
+            && checkFeature(SYNTHETIC_SOURCE_FEATURE_LEGACY, licenseStateSnapshot, isTemplateValidation)) {
+            // platinum license will allow synthetic source with gold legacy licensed feature too.
+            LOGGER.debug("legacy license [{}] is allowed to use synthetic source", licenseStateSnapshot.getOperationMode().description());
+            return false;
+        }
+
+        return true;
+    }
+
+    private static boolean checkFeature(
+        LicensedFeature.Momentary licensedFeature,
+        XPackLicenseState licenseStateSnapshot,
+        boolean isTemplateValidation
+    ) {
         if (isTemplateValidation) {
-            return SYNTHETIC_SOURCE_FEATURE.checkWithoutTracking(licenseState) == false;
+            return licensedFeature.checkWithoutTracking(licenseStateSnapshot);
         } else {
-            return SYNTHETIC_SOURCE_FEATURE.check(licenseState) == false;
+            return licensedFeature.check(licenseStateSnapshot);
         }
     }
 
@@ -62,7 +114,26 @@ void setSyntheticSourceFallback(boolean syntheticSourceFallback) {
         this.syntheticSourceFallback = syntheticSourceFallback;
     }
 
+    void setLicenseService(LicenseService licenseService) {
+        this.licenseService = licenseService;
+    }
+
     void setLicenseState(XPackLicenseState licenseState) {
         this.licenseState = licenseState;
     }
+
+    private static long getCutoffDate(String cutoffDateAsString) {
+        if (cutoffDateAsString != null) {
+            long cutoffDate = LocalDateTime.parse(cutoffDateAsString).toInstant(ZoneOffset.UTC).toEpochMilli();
+            LOGGER.warn("Configuring [{}] is only allowed with explicit approval from Elastic.", CUTOFF_DATE_SYS_PROP_NAME);
+            LOGGER.info(
+                "Configuring [{}] to [{}]",
+                CUTOFF_DATE_SYS_PROP_NAME,
+                LocalDateTime.ofInstant(Instant.ofEpochSecond(cutoffDate), ZoneOffset.UTC)
+            );
+            return cutoffDate;
+        } else {
+            return DEFAULT_CUTOFF_DATE;
+        }
+    }
 }
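The restricted override property takes an ISO-8601 local date-time, parsed by getCutoffDate(...) above via LocalDateTime.parse. A minimal illustration, with an example value matching the one used in the tests below:

    import java.time.LocalDateTime;
    import java.time.ZoneOffset;

    // Example only: the value that would be passed as
    // -Des.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override=2025-01-02T00:00
    public class CutoffDateParseExample {
        public static void main(String[] args) {
            long cutoffMillis = LocalDateTime.parse("2025-01-02T00:00").toInstant(ZoneOffset.UTC).toEpochMilli();
            System.out.println(cutoffMillis);
        }
    }

Note that the INFO log in getCutoffDate passes epoch milliseconds to Instant.ofEpochSecond, so the logged date-time will not match the parsed value; the comparison itself is done consistently in milliseconds and is unaffected.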
diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java
new file mode 100644
index 0000000000000..890bc464a2579
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LegacyLicenceIntegrationTests.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.license.AbstractLicensesIntegrationTestCase;
+import org.elasticsearch.license.GetFeatureUsageRequest;
+import org.elasticsearch.license.GetFeatureUsageResponse;
+import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicenseService;
+import org.elasticsearch.license.LicensedFeature;
+import org.elasticsearch.license.TransportGetFeatureUsageAction;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
+import org.hamcrest.Matcher;
+import org.junit.Before;
+
+import java.nio.file.Path;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.util.Collection;
+import java.util.List;
+
+import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
+import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createEnterpriseLicense;
+import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createGoldOrPlatinumLicense;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
+
+@ESIntegTestCase.ClusterScope(scope = TEST, numDataNodes = 1, numClientNodes = 0, supportsDedicatedMasters = false)
+public class LegacyLicenceIntegrationTests extends AbstractLicensesIntegrationTestCase {
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return List.of(P.class);
+    }
+
+    @Before
+    public void setup() throws Exception {
+        wipeAllLicenses();
+        ensureGreen();
+        License license = createGoldOrPlatinumLicense();
+        putLicense(license);
+        ensureGreen();
+    }
+
+    public void testSyntheticSourceUsageDisallowed() {
+        createIndexWithSyntheticSourceAndAssertExpectedType("test", "STORED");
+
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue());
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue());
+    }
+
+    public void testSyntheticSourceUsageWithLegacyLicense() {
+        createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-stacktraces", "synthetic");
+
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue()));
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue());
+    }
+
+    public void testSyntheticSourceUsageWithLegacyLicensePastCutoff() throws Exception {
+        long startPastCutoff = LocalDateTime.of(2025, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        putLicense(createGoldOrPlatinumLicense(startPastCutoff));
+        ensureGreen();
+
+        createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-stacktraces", "STORED");
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue());
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, nullValue());
+    }
+
+    public void testSyntheticSourceUsageWithEnterpriseLicensePastCutoff() throws Exception {
+        long startPastCutoff = LocalDateTime.of(2025, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        putLicense(createEnterpriseLicense(startPastCutoff));
+        ensureGreen();
+
+        createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-traces", "synthetic");
+        // also supports non-exceptional indices
+        createIndexWithSyntheticSourceAndAssertExpectedType("test", "synthetic");
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, nullValue());
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue()));
+    }
+
+    public void testSyntheticSourceUsageTracksBothLegacyAndRegularFeature() throws Exception {
+        createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-traces", "synthetic");
+
+        putLicense(createEnterpriseLicense());
+        ensureGreen();
+
+        createIndexWithSyntheticSourceAndAssertExpectedType(".profiling-traces-v2", "synthetic");
+
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY, not(nullValue()));
+        assertFeatureUsage(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE, not(nullValue()));
+    }
+
+    private void createIndexWithSyntheticSourceAndAssertExpectedType(String indexName, String expectedType) {
+        var settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "synthetic").build();
+        createIndex(indexName, settings);
+        var response = admin().indices().getSettings(new GetSettingsRequest().indices(indexName)).actionGet();
+        assertThat(
+            response.getIndexToSettings().get(indexName).get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()),
+            equalTo(expectedType)
+        );
+    }
+
+    private List<GetFeatureUsageResponse.FeatureUsageInfo> getFeatureUsageInfo() {
+        return client().execute(TransportGetFeatureUsageAction.TYPE, new GetFeatureUsageRequest()).actionGet().getFeatures();
+    }
+
+    private void assertFeatureUsage(LicensedFeature.Momentary syntheticSourceFeature, Matcher matcher) {
+        GetFeatureUsageResponse.FeatureUsageInfo featureUsage = getFeatureUsageInfo().stream()
+            .filter(f -> f.getFamily().equals(SyntheticSourceLicenseService.MAPPINGS_FEATURE_FAMILY))
+            .filter(f -> f.getName().equals(syntheticSourceFeature.getName()))
+            .findAny()
+            .orElse(null);
+        assertThat(featureUsage, matcher);
+    }
+
+    public static class P extends LocalStateCompositeXPackPlugin {
+
+        public P(final Settings settings, final Path configPath) {
+            super(settings, configPath);
+            plugins.add(new LogsDBPlugin(settings) {
+                @Override
+                protected XPackLicenseState getLicenseState() {
+                    return P.this.getLicenseState();
+                }
+
+                @Override
+                protected LicenseService getLicenseService() {
+                    return P.this.getLicenseService();
+                }
+            });
+        }
+
+    }
+}
diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java
new file mode 100644
index 0000000000000..939d7d892a48d
--- /dev/null
+++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.cluster.metadata.DataStream;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexMode;
+import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.MapperTestUtils;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicenseService;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.license.internal.XPackLicenseStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.util.List;
+
+import static org.elasticsearch.xpack.logsdb.SyntheticSourceIndexSettingsProviderTests.getLogsdbIndexModeSettingsProvider;
+import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createGoldOrPlatinumLicense;
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SyntheticSourceIndexSettingsProviderLegacyLicenseTests extends ESTestCase {
+
+    private SyntheticSourceIndexSettingsProvider provider;
+
+    @Before
+    public void setup() throws Exception {
+        long time = LocalDateTime.of(2024, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        License license = createGoldOrPlatinumLicense();
+        var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null));
+
+        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        licenseService.setLicenseState(licenseState);
+        var mockLicenseService = mock(LicenseService.class);
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        syntheticSourceLicenseService.setLicenseState(licenseState);
+        syntheticSourceLicenseService.setLicenseService(mockLicenseService);
+
+        provider = new SyntheticSourceIndexSettingsProvider(
+            syntheticSourceLicenseService,
+            im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()),
+            getLogsdbIndexModeSettingsProvider(false),
+            IndexVersion::current
+        );
+    }
+
+    public void testGetAdditionalIndexSettingsDefault() {
+        Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build();
+        String dataStreamName = "metrics-my-app";
+        String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0);
+        var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of());
+        assertThat(result.size(), equalTo(1));
+        assertThat(result.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED"));
+    }
+
+    public void testGetAdditionalIndexSettingsApm() throws IOException {
+        Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build();
+        String dataStreamName = "metrics-apm.app.test";
+        String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0);
+        var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of());
+        assertThat(result.size(), equalTo(0));
+    }
+
+    public void testGetAdditionalIndexSettingsProfiling() throws IOException {
+        Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build();
+        for (String dataStreamName : new String[] { "profiling-metrics", "profiling-events" }) {
+            String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0);
+            var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, null, null, null, settings, List.of());
+            assertThat(result.size(), equalTo(0));
+        }
+
+        for (String indexName : new String[] { ".profiling-sq-executables", ".profiling-sq-leafframes", ".profiling-stacktraces" }) {
+            var result = provider.getAdditionalIndexSettings(indexName, null, null, null, null, settings, List.of());
+            assertThat(result.size(), equalTo(0));
+        }
+    }
+
+    public void testGetAdditionalIndexSettingsTsdb() throws IOException {
+        Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build();
+        String dataStreamName = "metrics-my-app";
+        String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0);
+        var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, IndexMode.TIME_SERIES, null, null, settings, List.of());
+        assertThat(result.size(), equalTo(0));
+    }
+
+    public void testGetAdditionalIndexSettingsTsdbAfterCutoffDate() throws Exception {
+        long start = LocalDateTime.of(2024, 12, 20, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        License license = createGoldOrPlatinumLicense(start);
+        long time = LocalDateTime.of(2024, 12, 31, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null));
+
+        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        licenseService.setLicenseState(licenseState);
+        var mockLicenseService = mock(LicenseService.class);
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        SyntheticSourceLicenseService syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        syntheticSourceLicenseService.setLicenseState(licenseState);
+        syntheticSourceLicenseService.setLicenseService(mockLicenseService);
+
+        provider = new SyntheticSourceIndexSettingsProvider(
+            syntheticSourceLicenseService,
+            im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()),
+            getLogsdbIndexModeSettingsProvider(false),
+            IndexVersion::current
+        );
+
+        Settings settings = Settings.builder().put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "SYNTHETIC").build();
+        String dataStreamName = "metrics-my-app";
+        String indexName = DataStream.getDefaultBackingIndexName(dataStreamName, 0);
+        var result = provider.getAdditionalIndexSettings(indexName, dataStreamName, IndexMode.TIME_SERIES, null, null, settings, List.of());
+        assertThat(result.size(), equalTo(1));
+        assertThat(result.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey()), equalTo("STORED"));
+    }
+}
diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java
index d6cdb9f761b31..df1fb8f2d958c 100644
--- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java
+++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java
@@ -18,6 +18,8 @@
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicenseService;
 import org.elasticsearch.license.MockLicenseState;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
@@ -28,6 +30,7 @@
 import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.elasticsearch.common.settings.Settings.builder;
+import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseServiceTests.createEnterpriseLicense;
 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
@@ -39,18 +42,22 @@ public class SyntheticSourceIndexSettingsProviderTests extends ESTestCase {
     private SyntheticSourceIndexSettingsProvider provider;
     private final AtomicInteger newMapperServiceCounter = new AtomicInteger();
 
-    private static LogsdbIndexModeSettingsProvider getLogsdbIndexModeSettingsProvider(boolean enabled) {
+    static LogsdbIndexModeSettingsProvider getLogsdbIndexModeSettingsProvider(boolean enabled) {
         return new LogsdbIndexModeSettingsProvider(Settings.builder().put("cluster.logsdb.enabled", enabled).build());
     }
 
     @Before
-    public void setup() {
-        MockLicenseState licenseState = mock(MockLicenseState.class);
+    public void setup() throws Exception {
+        MockLicenseState licenseState = MockLicenseState.createMock();
         when(licenseState.isAllowed(any())).thenReturn(true);
         var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
         licenseService.setLicenseState(licenseState);
+        var mockLicenseService = mock(LicenseService.class);
+        License license = createEnterpriseLicense();
+        when(mockLicenseService.getLicense()).thenReturn(license);
         syntheticSourceLicenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
         syntheticSourceLicenseService.setLicenseState(licenseState);
+        syntheticSourceLicenseService.setLicenseService(mockLicenseService);
 
         provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> {
             newMapperServiceCounter.incrementAndGet();
diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java
index 430ee75eb3561..90a13b16c028e 100644
--- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java
+++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseServiceTests.java
@@ -8,54 +8,195 @@
 package org.elasticsearch.xpack.logsdb;
 
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicenseService;
 import org.elasticsearch.license.MockLicenseState;
+import org.elasticsearch.license.TestUtils;
 import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
 import org.mockito.Mockito;
 
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.util.UUID;
+
+import static org.elasticsearch.license.TestUtils.dateMath;
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.same;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 public class SyntheticSourceLicenseServiceTests extends ESTestCase {
 
+    private LicenseService mockLicenseService;
+    private SyntheticSourceLicenseService licenseService;
+
+    @Before
+    public void setup() throws Exception {
+        mockLicenseService = mock(LicenseService.class);
+        License license = createEnterpriseLicense();
+        when(mockLicenseService.getLicense()).thenReturn(license);
+        licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+    }
+
     public void testLicenseAllowsSyntheticSource() {
-        MockLicenseState licenseState = mock(MockLicenseState.class);
-        when(licenseState.isAllowed(any())).thenReturn(true);
-        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true);
         licenseService.setLicenseState(licenseState);
-        assertFalse("synthetic source is allowed, so not fallback to stored source", licenseService.fallbackToStoredSource(false));
+        licenseService.setLicenseService(mockLicenseService);
+        assertFalse(
+            "synthetic source is allowed, so not fallback to stored source",
+            licenseService.fallbackToStoredSource(false, randomBoolean())
+        );
         Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any());
     }
 
     public void testLicenseAllowsSyntheticSourceTemplateValidation() {
-        MockLicenseState licenseState = mock(MockLicenseState.class);
-        when(licenseState.isAllowed(any())).thenReturn(true);
-        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true);
         licenseService.setLicenseState(licenseState);
-        assertFalse("synthetic source is allowed, so not fallback to stored source", licenseService.fallbackToStoredSource(true));
+        licenseService.setLicenseService(mockLicenseService);
+        assertFalse(
+            "synthetic source is allowed, so not fallback to stored source",
+            licenseService.fallbackToStoredSource(true, randomBoolean())
+        );
         Mockito.verify(licenseState, Mockito.never()).featureUsed(any());
     }
 
     public void testDefaultDisallow() {
-        MockLicenseState licenseState = mock(MockLicenseState.class);
-        when(licenseState.isAllowed(any())).thenReturn(false);
-        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false);
         licenseService.setLicenseState(licenseState);
-        assertTrue("synthetic source is not allowed, so fallback to stored source", licenseService.fallbackToStoredSource(false));
+        licenseService.setLicenseService(mockLicenseService);
+        assertTrue(
+            "synthetic source is not allowed, so fallback to stored source",
+            licenseService.fallbackToStoredSource(false, randomBoolean())
+        );
         Mockito.verify(licenseState, Mockito.never()).featureUsed(any());
     }
 
     public void testFallback() {
-        MockLicenseState licenseState = mock(MockLicenseState.class);
-        when(licenseState.isAllowed(any())).thenReturn(true);
-        var licenseService = new SyntheticSourceLicenseService(Settings.EMPTY);
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(true);
         licenseService.setLicenseState(licenseState);
+        licenseService.setLicenseService(mockLicenseService);
         licenseService.setSyntheticSourceFallback(true);
         assertTrue(
             "synthetic source is allowed, but fallback has been enabled, so fallback to stored source",
-            licenseService.fallbackToStoredSource(false)
+            licenseService.fallbackToStoredSource(false, randomBoolean())
         );
         Mockito.verifyNoInteractions(licenseState);
+        Mockito.verifyNoInteractions(mockLicenseService);
+    }
+
+    public void testGoldOrPlatinumLicense() throws Exception {
+        mockLicenseService = mock(LicenseService.class);
+        License license = createGoldOrPlatinumLicense();
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.getOperationMode()).thenReturn(license.operationMode());
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true);
+        licenseService.setLicenseState(licenseState);
+        licenseService.setLicenseService(mockLicenseService);
+        assertFalse(
+            "legacy licensed usage is allowed, so not fallback to stored source",
+            licenseService.fallbackToStoredSource(false, true)
+        );
+        Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any());
     }
 
+    public void testGoldOrPlatinumLicenseLegacyLicenseNotAllowed() throws Exception {
+        mockLicenseService = mock(LicenseService.class);
+        License license = createGoldOrPlatinumLicense();
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.getOperationMode()).thenReturn(license.operationMode());
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false);
+        licenseService.setLicenseState(licenseState);
+        licenseService.setLicenseService(mockLicenseService);
+        assertTrue(
+            "legacy licensed usage is not allowed, so fallback to stored source",
+            licenseService.fallbackToStoredSource(false, false)
+        );
+        Mockito.verify(licenseState, Mockito.never()).featureUsed(any());
+        Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE));
+    }
+
+    public void testGoldOrPlatinumLicenseBeyondCutoffDate() throws Exception {
+        long start = LocalDateTime.of(2025, 1, 1, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        License license = createGoldOrPlatinumLicense(start);
+        mockLicenseService = mock(LicenseService.class);
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.getOperationMode()).thenReturn(license.operationMode());
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE))).thenReturn(false);
+        licenseService.setLicenseState(licenseState);
+        licenseService.setLicenseService(mockLicenseService);
+        assertTrue("beyond cutoff date, so fallback to stored source", licenseService.fallbackToStoredSource(false, true));
+        Mockito.verify(licenseState, Mockito.never()).featureUsed(any());
+        Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE));
+    }
+
+    public void testGoldOrPlatinumLicenseCustomCutoffDate() throws Exception {
+        licenseService = new SyntheticSourceLicenseService(Settings.EMPTY, "2025-01-02T00:00");
+
+        long start = LocalDateTime.of(2025, 1, 1, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        License license = createGoldOrPlatinumLicense(start);
+        mockLicenseService = mock(LicenseService.class);
+        when(mockLicenseService.getLicense()).thenReturn(license);
+
+        MockLicenseState licenseState = MockLicenseState.createMock();
+        when(licenseState.getOperationMode()).thenReturn(license.operationMode());
+        when(licenseState.isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY))).thenReturn(true);
+        licenseService.setLicenseState(licenseState);
+        licenseService.setLicenseService(mockLicenseService);
+        assertFalse("custom cutoff date, so fallback to stored source", licenseService.fallbackToStoredSource(false, true));
+        Mockito.verify(licenseState, Mockito.times(1)).featureUsed(any());
+        Mockito.verify(licenseState, Mockito.times(1)).isAllowed(same(SyntheticSourceLicenseService.SYNTHETIC_SOURCE_FEATURE_LEGACY));
+    }
+
+    static License createEnterpriseLicense() throws Exception {
+        long start = LocalDateTime.of(2024, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        return createEnterpriseLicense(start);
+    }
+
+    static License createEnterpriseLicense(long start) throws Exception {
+        String uid = UUID.randomUUID().toString();
+        long currentTime = System.currentTimeMillis();
+        final License.Builder builder = License.builder()
+            .uid(uid)
+            .version(License.VERSION_CURRENT)
+            .expiryDate(dateMath("now+2d", currentTime))
+            .startDate(start)
+            .issueDate(currentTime)
+            .type("enterprise")
+            .issuedTo("customer")
+            .issuer("elasticsearch")
+            .maxResourceUnits(10);
+        return TestUtils.generateSignedLicense(builder);
+    }
+
+    static License createGoldOrPlatinumLicense() throws Exception {
+        long start = LocalDateTime.of(2024, 11, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        return createGoldOrPlatinumLicense(start);
+    }
+
+    static License createGoldOrPlatinumLicense(long start) throws Exception {
+        String uid = UUID.randomUUID().toString();
+        long currentTime = System.currentTimeMillis();
+        final License.Builder builder = License.builder()
+            .uid(uid)
+            .version(License.VERSION_CURRENT)
+            .expiryDate(dateMath("now+100d", currentTime))
+            .startDate(start)
+            .issueDate(currentTime)
+            .type(randomBoolean() ? "gold" : "platinum")
+            .issuedTo("customer")
+            .issuer("elasticsearch")
+            .maxNodes(5);
+        return TestUtils.generateSignedLicense(builder);
+    }
 }

From b13e0d25c0ef52bf6236a981bee4823b12934a57 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 26 Nov 2024 09:06:02 +0000
Subject: [PATCH 245/386] Support dynamic credentials in `S3HttpFixture`
 (#117458)

Rephrase the authorization check in `S3HttpFixture` in terms of a
predicate provided by the caller so that there's no need for a separate
subclass that handles session tokens, and so that it can support
auto-generated credentials more naturally.

Also adapts `Ec2ImdsHttpFixture` to dynamically generate credentials
this way.

Also extracts the STS fixture in `S3HttpFixtureWithSTS` into a separate
service, similarly to #117324, and adapts this new fixture to
dynamically generate credentials too.

Relates ES-9984
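With the predicate in place, fixed and dynamic credential checks are wired the same way. A sketch of the two styles, using only constructors and helpers that appear in the diffs below; the class name, bucket, and path values are illustrative, not part of the patch:

    import fixture.s3.DynamicS3Credentials;
    import fixture.s3.S3HttpFixture;

    class FixtureWiringSketch {
        // Fixed credentials, expressed as a predicate via a helper from this patch:
        static final S3HttpFixture fixedFixture = new S3HttpFixture(
            true,
            "example_bucket",
            "example_base_path",
            S3HttpFixture.fixedAccessKeyAndToken("example-access-key", "example-session-token")
        );

        // Dynamic credentials: the IMDS/STS fixture registers whatever credentials it
        // hands out via DynamicS3Credentials, and the S3 fixture accepts exactly those.
        static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();
        static final S3HttpFixture dynamicFixture = new S3HttpFixture(
            true,
            "example_bucket",
            "example_base_path",
            dynamicS3Credentials::isAuthorized
        );
    }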
---
 modules/repository-s3/build.gradle           |   1 +
 .../RepositoryS3RestReloadCredentialsIT.java |  15 +-
 .../s3/RepositoryS3ClientYamlTestSuiteIT.java |  25 +-
 .../RepositoryS3EcsClientYamlTestSuiteIT.java |  25 +-
 .../RepositoryS3StsClientYamlTestSuiteIT.java |  27 +-
 settings.gradle                               |   1 +
 test/fixtures/aws-sts-fixture/build.gradle    |  19 ++
 .../fixture/aws/sts/AwsStsHttpFixture.java    |  64 +++++
 .../fixture/aws/sts/AwsStsHttpHandler.java}   |  77 +++--
 .../aws/sts/AwsStsHttpHandlerTests.java       | 268 ++++++++++++++++++
 .../fixture/aws/imds/Ec2ImdsHttpFixture.java  |  13 +-
 .../fixture/aws/imds/Ec2ImdsHttpHandler.java  |  12 +-
 .../aws/imds/Ec2ImdsHttpHandlerTests.java     |  15 +-
 .../java/fixture/s3/DynamicS3Credentials.java |  39 +++
 .../main/java/fixture/s3/S3HttpFixture.java   |  40 ++-
 .../s3/S3HttpFixtureWithSessionToken.java     |  42 ---
 ...earchableSnapshotsCredentialsReloadIT.java |  23 +-
 17 files changed, 552 insertions(+), 154 deletions(-)
 create mode 100644 test/fixtures/aws-sts-fixture/build.gradle
 create mode 100644 test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpFixture.java
 rename test/fixtures/{s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSTS.java => aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java} (66%)
 create mode 100644 test/fixtures/aws-sts-fixture/src/test/java/fixture/aws/sts/AwsStsHttpHandlerTests.java
 create mode 100644 test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java
 delete mode 100644 test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java

diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle
index 9a7f0a5994d73..ed1777891f40d 100644
--- a/modules/repository-s3/build.gradle
+++ b/modules/repository-s3/build.gradle
@@ -46,6 +46,7 @@ dependencies {
   yamlRestTestImplementation project(":test:framework")
   yamlRestTestImplementation project(':test:fixtures:s3-fixture')
   yamlRestTestImplementation project(':test:fixtures:ec2-imds-fixture')
+  yamlRestTestImplementation project(':test:fixtures:aws-sts-fixture')
   yamlRestTestImplementation project(':test:fixtures:minio-fixture')
 
   internalClusterTestImplementation project(':test:fixtures:minio-fixture')
diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java
index 2f3e995b52468..430c0a1994967 100644
--- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java
+++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java
@@ -35,7 +35,14 @@ public class RepositoryS3RestReloadCredentialsIT extends ESRestTestCase {
     private static final String BUCKET = "RepositoryS3RestReloadCredentialsIT-bucket-" + HASHED_SEED;
     private static final String BASE_PATH = "RepositoryS3RestReloadCredentialsIT-base-path-" + HASHED_SEED;
 
-    public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored");
+    private static volatile String repositoryAccessKey;
+
+    public static final S3HttpFixture s3Fixture = new S3HttpFixture(
+        true,
+        BUCKET,
+        BASE_PATH,
+        S3HttpFixture.mutableAccessKey(() -> repositoryAccessKey)
+    );
 
     private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider();
 
@@ -68,7 +75,7 @@ public void testReloadCredentialsFromKeystore() throws IOException {
 
         // Set up initial credentials
         final var accessKey1 = randomIdentifier();
-        s3Fixture.setAccessKey(accessKey1);
+        repositoryAccessKey = accessKey1;
         keystoreSettings.put("s3.client.default.access_key", accessKey1);
         keystoreSettings.put("s3.client.default.secret_key", randomIdentifier());
         cluster.updateStoredSecureSettings();
@@ -79,14 +86,14 @@ public void testReloadCredentialsFromKeystore() throws IOException {
 
         // Rotate credentials in blob store
         final var accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier);
-        s3Fixture.setAccessKey(accessKey2);
+        repositoryAccessKey = accessKey2;
 
         // Ensure that initial credentials now invalid
         final var accessDeniedException2 = expectThrows(ResponseException.class, () -> client().performRequest(verifyRequest));
         assertThat(accessDeniedException2.getResponse().getStatusLine().getStatusCode(), equalTo(500));
         assertThat(
             accessDeniedException2.getMessage(),
-            allOf(containsString("Bad access key"), containsString("Status Code: 403"), containsString("Error Code: AccessDenied"))
+            allOf(containsString("Access denied"), containsString("Status Code: 403"), containsString("Error Code: AccessDenied"))
         );
 
         // Set up refreshed credentials
diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java
index 64cb3c3fd3a69..a3b154b4bdfed 100644
--- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java
+++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java
@@ -10,8 +10,8 @@
 package org.elasticsearch.repositories.s3;
 
 import fixture.aws.imds.Ec2ImdsHttpFixture;
+import fixture.s3.DynamicS3Credentials;
 import fixture.s3.S3HttpFixture;
-import fixture.s3.S3HttpFixtureWithSessionToken;
 
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
@@ -34,27 +34,30 @@ public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3Clien
 
     private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed")));
     private static final String TEMPORARY_SESSION_TOKEN = "session_token-" + HASHED_SEED;
-    private static final String IMDS_ACCESS_KEY = "imds-access-key-" + HASHED_SEED;
-    private static final String IMDS_SESSION_TOKEN = "imds-session-token-" + HASHED_SEED;
 
     private static final S3HttpFixture s3Fixture = new S3HttpFixture();
 
-    private static final S3HttpFixtureWithSessionToken s3HttpFixtureWithSessionToken = new S3HttpFixtureWithSessionToken(
+    private static final S3HttpFixture s3HttpFixtureWithSessionToken = new S3HttpFixture(
+        true,
         "session_token_bucket",
         "session_token_base_path_integration_tests",
-        System.getProperty("s3TemporaryAccessKey"),
-        TEMPORARY_SESSION_TOKEN
+        S3HttpFixture.fixedAccessKeyAndToken(System.getProperty("s3TemporaryAccessKey"), TEMPORARY_SESSION_TOKEN)
     );
 
-    private static final S3HttpFixtureWithSessionToken s3HttpFixtureWithImdsSessionToken = new S3HttpFixtureWithSessionToken(
+    private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();
+
+    private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(
+        dynamicS3Credentials::addValidCredentials,
+        Set.of()
+    );
+
+    private static final S3HttpFixture s3HttpFixtureWithImdsSessionToken = new S3HttpFixture(
+        true,
         "ec2_bucket",
         "ec2_base_path",
-        IMDS_ACCESS_KEY,
-        IMDS_SESSION_TOKEN
+        dynamicS3Credentials::isAuthorized
     );
 
-    private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(IMDS_ACCESS_KEY, IMDS_SESSION_TOKEN, Set.of());
-
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
         .keystore("s3.client.integration_test_permanent.access_key", System.getProperty("s3PermanentAccessKey"))
diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java
index a522c9b17145b..bbd003f506ead 100644
--- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java
+++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java
@@ -10,12 +10,12 @@
 package org.elasticsearch.repositories.s3;
 
 import fixture.aws.imds.Ec2ImdsHttpFixture;
-import fixture.s3.S3HttpFixtureWithSessionToken;
+import fixture.s3.DynamicS3Credentials;
+import fixture.s3.S3HttpFixture;
 
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
-import org.elasticsearch.cluster.routing.Murmur3HashFunction;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
 import org.junit.ClassRule;
@@ -26,23 +26,20 @@
 
 public class RepositoryS3EcsClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT {
 
-    private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed")));
-    private static final String ECS_ACCESS_KEY = "ecs-access-key-" + HASHED_SEED;
-    private static final String ECS_SESSION_TOKEN = "ecs-session-token-" + HASHED_SEED;
-
-    private static final S3HttpFixtureWithSessionToken s3Fixture = new S3HttpFixtureWithSessionToken(
-        "ecs_bucket",
-        "ecs_base_path",
-        ECS_ACCESS_KEY,
-        ECS_SESSION_TOKEN
-    );
+    private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();
 
     private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(
-        ECS_ACCESS_KEY,
-        ECS_SESSION_TOKEN,
+        dynamicS3Credentials::addValidCredentials,
         Set.of("/ecs_credentials_endpoint")
     );
 
+    private static final S3HttpFixture s3Fixture = new S3HttpFixture(
+        true,
+        "ecs_bucket",
+        "ecs_base_path",
+        dynamicS3Credentials::isAuthorized
+    );
+
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
         .setting("s3.client.integration_test_ecs.endpoint", s3Fixture::getAddress)
diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java
index 24f03a6ae7624..7c4d719485113 100644
--- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java
+++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java
@@ -9,8 +9,9 @@
 
 package org.elasticsearch.repositories.s3;
 
+import fixture.aws.sts.AwsStsHttpFixture;
+import fixture.s3.DynamicS3Credentials;
 import fixture.s3.S3HttpFixture;
-import fixture.s3.S3HttpFixtureWithSTS;
 
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
@@ -24,13 +25,27 @@
 
 public class RepositoryS3StsClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT {
 
-    public static final S3HttpFixture s3Fixture = new S3HttpFixture();
-    private static final S3HttpFixtureWithSTS s3Sts = new S3HttpFixtureWithSTS();
+    private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();
+
+    private static final S3HttpFixture s3HttpFixture = new S3HttpFixture(
+        true,
+        "sts_bucket",
+        "sts_base_path",
+        dynamicS3Credentials::isAuthorized
+    );
+
+    private static final AwsStsHttpFixture stsHttpFixture = new AwsStsHttpFixture(dynamicS3Credentials::addValidCredentials, """
+        Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDans\
+        FBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFO\
+        zTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ""");
 
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
-        .setting("s3.client.integration_test_sts.endpoint", s3Sts::getAddress)
-        .systemProperty("com.amazonaws.sdk.stsMetadataServiceEndpointOverride", () -> s3Sts.getAddress() + "/assume-role-with-web-identity")
+        .setting("s3.client.integration_test_sts.endpoint", s3HttpFixture::getAddress)
+        .systemProperty(
+            "com.amazonaws.sdk.stsMetadataServiceEndpointOverride",
+            () -> stsHttpFixture.getAddress() + "/assume-role-with-web-identity"
+        )
         .configFile("repository-s3/aws-web-identity-token-file", Resource.fromClasspath("aws-web-identity-token-file"))
         .environment("AWS_WEB_IDENTITY_TOKEN_FILE", System.getProperty("awsWebIdentityTokenExternalLocation"))
         //
         // The AWS STS SDK requires the role and session names to be set. We can verify that they are sent to S3S in the
         //
         .build();
 
     @ClassRule
-    public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(s3Sts).around(cluster);
+    public static TestRule ruleChain = RuleChain.outerRule(s3HttpFixture).around(stsHttpFixture).around(cluster);
 
     @ParametersFactory
     public static Iterable<Object[]> parameters() throws Exception {
diff --git a/settings.gradle b/settings.gradle
index 7bf03263031f1..4722fc311480a 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -86,6 +86,7 @@ List<String> projects = [
   'distribution:tools:ansi-console',
   'server',
   'test:framework',
+  'test:fixtures:aws-sts-fixture',
  'test:fixtures:azure-fixture',
   'test:fixtures:ec2-imds-fixture',
   'test:fixtures:gcs-fixture',
diff --git a/test/fixtures/aws-sts-fixture/build.gradle b/test/fixtures/aws-sts-fixture/build.gradle
new file mode 100644
index 0000000000000..57f0f8fe25493
--- /dev/null
+++ b/test/fixtures/aws-sts-fixture/build.gradle
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+apply plugin: 'elasticsearch.java'
+
+description = 'Fixture for emulating the Security Token Service (STS) running in AWS'
+
+dependencies {
+  api project(':server')
+  api("junit:junit:${versions.junit}") {
+    transitive = false
+  }
+  api project(':test:framework')
+}
diff --git a/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpFixture.java b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpFixture.java
new file mode 100644
index 0000000000000..13ba7eaf8ba67
--- /dev/null
+++ b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpFixture.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package fixture.aws.sts;
+
+import com.sun.net.httpserver.HttpHandler;
+import com.sun.net.httpserver.HttpServer;
+
+import org.junit.rules.ExternalResource;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.Objects;
+import java.util.function.BiConsumer;
+
+public class AwsStsHttpFixture extends ExternalResource {
+
+    private HttpServer server;
+
+    private final BiConsumer<String, String> newCredentialsConsumer;
+    private final String webIdentityToken;
+
+    public AwsStsHttpFixture(BiConsumer<String, String> newCredentialsConsumer, String webIdentityToken) {
+        this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer);
+        this.webIdentityToken = Objects.requireNonNull(webIdentityToken);
+    }
+
+    protected HttpHandler createHandler() {
+        return new AwsStsHttpHandler(newCredentialsConsumer, webIdentityToken);
+    }
+
+    public String getAddress() {
+        return "http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort();
+    }
+
+    public void stop(int delay) {
+        server.stop(delay);
+    }
+
+    protected void before() throws Throwable {
+        server = HttpServer.create(resolveAddress(), 0);
+        server.createContext("/", Objects.requireNonNull(createHandler()));
+        server.start();
+    }
+
+    @Override
+    protected void after() {
+        stop(0);
+    }
+
+    private static InetSocketAddress resolveAddress() {
+        try {
+            return new InetSocketAddress(InetAddress.getByName("localhost"), 0);
+        } catch (UnknownHostException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSTS.java b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java
similarity index 66%
rename from test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSTS.java
rename to test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java
index 54e0be1e321a2..84541f5e15211 100644
--- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSTS.java
+++
b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java @@ -6,12 +6,16 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -package fixture.s3; +package fixture.aws.sts; +import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.RestStatus; +import java.io.IOException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.time.ZonedDateTime; @@ -19,53 +23,39 @@ import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.Objects; +import java.util.function.BiConsumer; import java.util.stream.Collectors; -public class S3HttpFixtureWithSTS extends S3HttpFixture { +import static org.elasticsearch.test.ESTestCase.randomIdentifier; - private static final String ROLE_ARN = "arn:aws:iam::123456789012:role/FederatedWebIdentityRole"; - private static final String ROLE_NAME = "sts-fixture-test"; - private final String sessionToken; - private final String webIdentityToken; +/** + * Minimal HTTP handler that emulates the AWS STS server + */ +@SuppressForbidden(reason = "this test uses a HttpServer to emulate the AWS STS endpoint") +public class AwsStsHttpHandler implements HttpHandler { - public S3HttpFixtureWithSTS() { - this(true); - } + static final String ROLE_ARN = "arn:aws:iam::123456789012:role/FederatedWebIdentityRole"; + static final String ROLE_NAME = "sts-fixture-test"; - public S3HttpFixtureWithSTS(boolean enabled) { - this( - enabled, - "sts_bucket", - "sts_base_path", - "sts_access_key", - "sts_session_token", - "Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" - ); - } + private final BiConsumer newCredentialsConsumer; + private final String webIdentityToken; - public S3HttpFixtureWithSTS( - boolean enabled, - String bucket, - String basePath, - String accessKey, - String sessionToken, - String webIdentityToken - ) { - super(enabled, bucket, basePath, accessKey); - this.sessionToken = sessionToken; - this.webIdentityToken = webIdentityToken; + public AwsStsHttpHandler(BiConsumer newCredentialsConsumer, String webIdentityToken) { + this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); + this.webIdentityToken = Objects.requireNonNull(webIdentityToken); } @Override - protected HttpHandler createHandler() { - final HttpHandler delegate = super.createHandler(); + public void handle(final HttpExchange exchange) throws IOException { + // https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html + + try (exchange) { + final var requestMethod = exchange.getRequestMethod(); + final var path = exchange.getRequestURI().getPath(); + + if ("POST".equals(requestMethod) && "/assume-role-with-web-identity/".equals(path)) { - return exchange -> { - // https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html - // It's run as a separate service, but we emulate it under the `assume-role-with-web-identity` endpoint - // of the S3 serve for the simplicity sake - if 
("POST".equals(exchange.getRequestMethod()) - && exchange.getRequestURI().getPath().startsWith("/assume-role-with-web-identity")) { String body = new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8); Map params = Arrays.stream(body.split("&")) .map(e -> e.split("=")) @@ -82,6 +72,9 @@ protected HttpHandler createHandler() { exchange.close(); return; } + final var accessKey = randomIdentifier(); + final var sessionToken = randomIdentifier(); + newCredentialsConsumer.accept(accessKey, sessionToken); final byte[] response = String.format( Locale.ROOT, """ @@ -95,7 +88,7 @@ protected HttpHandler createHandler() { %s - secret_access_key + %s %s %s @@ -109,6 +102,7 @@ protected HttpHandler createHandler() { ROLE_ARN, ROLE_NAME, sessionToken, + randomIdentifier(), ZonedDateTime.now().plusDays(1L).format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssZ")), accessKey ).getBytes(StandardCharsets.UTF_8); @@ -118,7 +112,8 @@ protected HttpHandler createHandler() { exchange.close(); return; } - delegate.handle(exchange); - }; + + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("not supported: " + requestMethod + " " + path)); + } } } diff --git a/test/fixtures/aws-sts-fixture/src/test/java/fixture/aws/sts/AwsStsHttpHandlerTests.java b/test/fixtures/aws-sts-fixture/src/test/java/fixture/aws/sts/AwsStsHttpHandlerTests.java new file mode 100644 index 0000000000000..4094ce18e7aef --- /dev/null +++ b/test/fixtures/aws-sts-fixture/src/test/java/fixture/aws/sts/AwsStsHttpHandlerTests.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package fixture.aws.sts; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpContext; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpPrincipal; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.containsString; + +public class AwsStsHttpHandlerTests extends ESTestCase { + + public void testGenerateCredentials() { + final Map generatedCredentials = new HashMap<>(); + + final var webIdentityToken = randomUnicodeOfLength(10); + final var handler = new AwsStsHttpHandler(generatedCredentials::put, webIdentityToken); + + final var response = handleRequest( + handler, + Map.of( + "Action", + "AssumeRoleWithWebIdentity", + "RoleSessionName", + AwsStsHttpHandler.ROLE_NAME, + "RoleArn", + AwsStsHttpHandler.ROLE_ARN, + "WebIdentityToken", + webIdentityToken + ) + ); + assertEquals(RestStatus.OK, response.status()); + + assertThat(generatedCredentials, aMapWithSize(1)); + final var accessKey = generatedCredentials.keySet().iterator().next(); + final var sessionToken = generatedCredentials.values().iterator().next(); + + final var responseBody = response.body().utf8ToString(); + assertThat(responseBody, containsString("" + accessKey + "")); + assertThat(responseBody, containsString("" + sessionToken + "")); + } + + public void testInvalidAction() { + final var handler = new AwsStsHttpHandler((key, token) -> fail(), randomUnicodeOfLength(10)); + final var response = handleRequest(handler, Map.of("Action", "Unsupported")); + assertEquals(RestStatus.BAD_REQUEST, response.status()); + } + + public void testInvalidRole() { + final var webIdentityToken = randomUnicodeOfLength(10); + final var handler = new AwsStsHttpHandler((key, token) -> fail(), webIdentityToken); + final var response = handleRequest( + handler, + Map.of( + "Action", + "AssumeRoleWithWebIdentity", + "RoleSessionName", + randomValueOtherThan(AwsStsHttpHandler.ROLE_NAME, ESTestCase::randomIdentifier), + "RoleArn", + AwsStsHttpHandler.ROLE_ARN, + "WebIdentityToken", + webIdentityToken + ) + ); + assertEquals(RestStatus.UNAUTHORIZED, response.status()); + } + + public void testInvalidToken() { + final var webIdentityToken = randomUnicodeOfLength(10); + final var handler = new AwsStsHttpHandler((key, token) -> fail(), webIdentityToken); + final var response = handleRequest( + handler, + Map.of( + "Action", + "AssumeRoleWithWebIdentity", + "RoleSessionName", + AwsStsHttpHandler.ROLE_NAME, + "RoleArn", + AwsStsHttpHandler.ROLE_ARN, + "WebIdentityToken", + randomValueOtherThan(webIdentityToken, () -> randomUnicodeOfLength(10)) + ) + ); + assertEquals(RestStatus.UNAUTHORIZED, response.status()); + } + + public void testInvalidARN() { + final var webIdentityToken = randomUnicodeOfLength(10); + final var handler = new AwsStsHttpHandler((key, token) -> fail(), webIdentityToken); + final var response = handleRequest( + handler, + Map.of( + "Action", + "AssumeRoleWithWebIdentity", + 
"RoleSessionName", + AwsStsHttpHandler.ROLE_NAME, + "RoleArn", + randomValueOtherThan(AwsStsHttpHandler.ROLE_ARN, ESTestCase::randomIdentifier), + "WebIdentityToken", + webIdentityToken + ) + ); + assertEquals(RestStatus.UNAUTHORIZED, response.status()); + } + + private record TestHttpResponse(RestStatus status, BytesReference body) {} + + private static TestHttpResponse handleRequest(AwsStsHttpHandler handler, Map body) { + final var httpExchange = new TestHttpExchange( + "POST", + "/assume-role-with-web-identity/", + new BytesArray( + body.entrySet() + .stream() + .map(e -> e.getKey() + "=" + URLEncoder.encode(e.getValue(), StandardCharsets.UTF_8)) + .collect(Collectors.joining("&")) + ), + TestHttpExchange.EMPTY_HEADERS + ); + try { + handler.handle(httpExchange); + } catch (IOException e) { + fail(e); + } + assertNotEquals(0, httpExchange.getResponseCode()); + return new TestHttpResponse(RestStatus.fromCode(httpExchange.getResponseCode()), httpExchange.getResponseBodyContents()); + } + + private static class TestHttpExchange extends HttpExchange { + + private static final Headers EMPTY_HEADERS = new Headers(); + + private final String method; + private final URI uri; + private final BytesReference requestBody; + private final Headers requestHeaders; + + private final Headers responseHeaders = new Headers(); + private final BytesStreamOutput responseBody = new BytesStreamOutput(); + private int responseCode; + + TestHttpExchange(String method, String uri, BytesReference requestBody, Headers requestHeaders) { + this.method = method; + this.uri = URI.create(uri); + this.requestBody = requestBody; + this.requestHeaders = requestHeaders; + } + + @Override + public Headers getRequestHeaders() { + return requestHeaders; + } + + @Override + public Headers getResponseHeaders() { + return responseHeaders; + } + + @Override + public URI getRequestURI() { + return uri; + } + + @Override + public String getRequestMethod() { + return method; + } + + @Override + public HttpContext getHttpContext() { + return null; + } + + @Override + public void close() {} + + @Override + public InputStream getRequestBody() { + try { + return requestBody.streamInput(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + + @Override + public OutputStream getResponseBody() { + return responseBody; + } + + @Override + public void sendResponseHeaders(int rCode, long responseLength) { + this.responseCode = rCode; + } + + @Override + public InetSocketAddress getRemoteAddress() { + return null; + } + + @Override + public int getResponseCode() { + return responseCode; + } + + public BytesReference getResponseBodyContents() { + return responseBody.bytes(); + } + + @Override + public InetSocketAddress getLocalAddress() { + return null; + } + + @Override + public String getProtocol() { + return "HTTP/1.1"; + } + + @Override + public Object getAttribute(String name) { + return null; + } + + @Override + public void setAttribute(String name, Object value) { + fail("setAttribute not implemented"); + } + + @Override + public void setStreams(InputStream i, OutputStream o) { + fail("setStreams not implemented"); + } + + @Override + public HttpPrincipal getPrincipal() { + fail("getPrincipal not implemented"); + throw new UnsupportedOperationException("getPrincipal not implemented"); + } + } + +} diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java index 68f46d778018c..13d36c6fc4812 
100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java @@ -18,23 +18,22 @@ import java.net.UnknownHostException; import java.util.Objects; import java.util.Set; +import java.util.function.BiConsumer; public class Ec2ImdsHttpFixture extends ExternalResource { private HttpServer server; - private final String accessKey; - private final String sessionToken; + private final BiConsumer newCredentialsConsumer; private final Set alternativeCredentialsEndpoints; - public Ec2ImdsHttpFixture(String accessKey, String sessionToken, Set alternativeCredentialsEndpoints) { - this.accessKey = accessKey; - this.sessionToken = sessionToken; - this.alternativeCredentialsEndpoints = alternativeCredentialsEndpoints; + public Ec2ImdsHttpFixture(BiConsumer newCredentialsConsumer, Set alternativeCredentialsEndpoints) { + this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); + this.alternativeCredentialsEndpoints = Objects.requireNonNull(alternativeCredentialsEndpoints); } protected HttpHandler createHandler() { - return new Ec2ImdsHttpHandler(accessKey, sessionToken, alternativeCredentialsEndpoints); + return new Ec2ImdsHttpHandler(newCredentialsConsumer, alternativeCredentialsEndpoints); } public String getAddress() { diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java index 04e5e83bddfa9..a92f1bdc5f9ae 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -25,6 +25,7 @@ import java.util.Collection; import java.util.Objects; import java.util.Set; +import java.util.function.BiConsumer; import static org.elasticsearch.test.ESTestCase.randomIdentifier; @@ -36,13 +37,11 @@ public class Ec2ImdsHttpHandler implements HttpHandler { private static final String IMDS_SECURITY_CREDENTIALS_PATH = "/latest/meta-data/iam/security-credentials/"; - private final String accessKey; - private final String sessionToken; + private final BiConsumer newCredentialsConsumer; private final Set validCredentialsEndpoints = ConcurrentCollections.newConcurrentSet(); - public Ec2ImdsHttpHandler(String accessKey, String sessionToken, Collection alternativeCredentialsEndpoints) { - this.accessKey = Objects.requireNonNull(accessKey); - this.sessionToken = Objects.requireNonNull(sessionToken); + public Ec2ImdsHttpHandler(BiConsumer newCredentialsConsumer, Collection alternativeCredentialsEndpoints) { + this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); this.validCredentialsEndpoints.addAll(alternativeCredentialsEndpoints); } @@ -70,6 +69,9 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.getResponseBody().write(response); return; } else if (validCredentialsEndpoints.contains(path)) { + final String accessKey = randomIdentifier(); + final String sessionToken = randomIdentifier(); + newCredentialsConsumer.accept(accessKey, sessionToken); final byte[] response = Strings.format( """ { diff --git a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java index 5d5cbfae3fa60..369b0ef449b2f 100644 --- 
a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java +++ b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java @@ -28,15 +28,18 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.URI; +import java.util.HashMap; +import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.aMapWithSize; + public class Ec2ImdsHttpHandlerTests extends ESTestCase { public void testImdsV1() throws IOException { - final var accessKey = randomIdentifier(); - final var sessionToken = randomIdentifier(); + final Map generatedCredentials = new HashMap<>(); - final var handler = new Ec2ImdsHttpHandler(accessKey, sessionToken, Set.of()); + final var handler = new Ec2ImdsHttpHandler(generatedCredentials::put, Set.of()); final var roleResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/"); assertEquals(RestStatus.OK, roleResponse.status()); @@ -46,6 +49,10 @@ public void testImdsV1() throws IOException { final var credentialsResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/" + profileName); assertEquals(RestStatus.OK, credentialsResponse.status()); + assertThat(generatedCredentials, aMapWithSize(1)); + final var accessKey = generatedCredentials.keySet().iterator().next(); + final var sessionToken = generatedCredentials.values().iterator().next(); + final var responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), credentialsResponse.body().streamInput(), false); assertEquals(Set.of("AccessKeyId", "Expiration", "RoleArn", "SecretAccessKey", "Token"), responseMap.keySet()); assertEquals(accessKey, responseMap.get("AccessKeyId")); @@ -55,7 +62,7 @@ public void testImdsV1() throws IOException { public void testImdsV2Disabled() { assertEquals( RestStatus.METHOD_NOT_ALLOWED, - handleRequest(new Ec2ImdsHttpHandler(randomIdentifier(), randomIdentifier(), Set.of()), "PUT", "/latest/api/token").status() + handleRequest(new Ec2ImdsHttpHandler((accessKey, sessionToken) -> fail(), Set.of()), "PUT", "/latest/api/token").status() ); } diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java new file mode 100644 index 0000000000000..4e8f267ad3543 --- /dev/null +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/DynamicS3Credentials.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.s3; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; + +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Allows dynamic creation of access-key/session-token credentials for accessing AWS services such as S3. Typically there's one service + * (e.g. IMDS or STS) which creates credentials dynamically and registers them here using {@link #addValidCredentials}, and then the + * {@link S3HttpFixture} uses {@link #isAuthorized} to validate the credentials it receives corresponds with some previously-generated + * credentials. 
+ */ +public class DynamicS3Credentials { + private final Map> validCredentialsMap = ConcurrentCollections.newConcurrentMap(); + + public boolean isAuthorized(String authorizationHeader, String sessionTokenHeader) { + return authorizationHeader != null + && sessionTokenHeader != null + && validCredentialsMap.getOrDefault(sessionTokenHeader, Set.of()).stream().anyMatch(authorizationHeader::contains); + } + + public void addValidCredentials(String accessKey, String sessionToken) { + validCredentialsMap.computeIfAbsent( + Objects.requireNonNull(sessionToken, "sessionToken"), + t -> ConcurrentCollections.newConcurrentSet() + ).add(Objects.requireNonNull(accessKey, "accessKey")); + } +} diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java index 421478a53e6bc..36f8fedcb3335 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java @@ -21,6 +21,8 @@ import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.Objects; +import java.util.function.BiPredicate; +import java.util.function.Supplier; public class S3HttpFixture extends ExternalResource { @@ -29,21 +31,21 @@ public class S3HttpFixture extends ExternalResource { private final boolean enabled; private final String bucket; private final String basePath; - protected volatile String accessKey; + private final BiPredicate authorizationPredicate; public S3HttpFixture() { this(true); } public S3HttpFixture(boolean enabled) { - this(enabled, "bucket", "base_path_integration_tests", "s3_test_access_key"); + this(enabled, "bucket", "base_path_integration_tests", fixedAccessKey("s3_test_access_key")); } - public S3HttpFixture(boolean enabled, String bucket, String basePath, String accessKey) { + public S3HttpFixture(boolean enabled, String bucket, String basePath, BiPredicate authorizationPredicate) { this.enabled = enabled; this.bucket = bucket; this.basePath = basePath; - this.accessKey = accessKey; + this.authorizationPredicate = authorizationPredicate; } protected HttpHandler createHandler() { @@ -51,9 +53,11 @@ protected HttpHandler createHandler() { @Override public void handle(final HttpExchange exchange) throws IOException { try { - final String authorization = exchange.getRequestHeaders().getFirst("Authorization"); - if (authorization == null || authorization.contains(accessKey) == false) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad access key"); + if (authorizationPredicate.test( + exchange.getRequestHeaders().getFirst("Authorization"), + exchange.getRequestHeaders().getFirst("x-amz-security-token") + ) == false) { + sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Access denied by " + authorizationPredicate); return; } super.handle(exchange); @@ -76,7 +80,7 @@ public void stop(int delay) { protected void before() throws Throwable { if (enabled) { - InetSocketAddress inetSocketAddress = resolveAddress("localhost", 0); + InetSocketAddress inetSocketAddress = resolveAddress(); this.server = HttpServer.create(inetSocketAddress, 0); HttpHandler handler = createHandler(); this.server.createContext("/", Objects.requireNonNull(handler)); @@ -91,15 +95,27 @@ protected void after() { } } - private static InetSocketAddress resolveAddress(String address, int port) { + private static InetSocketAddress resolveAddress() { try { - return new InetSocketAddress(InetAddress.getByName(address), port); + 
return new InetSocketAddress(InetAddress.getByName("localhost"), 0); } catch (UnknownHostException e) { throw new RuntimeException(e); } } - public void setAccessKey(String accessKey) { - this.accessKey = accessKey; + public static BiPredicate fixedAccessKey(String accessKey) { + return mutableAccessKey(() -> accessKey); + } + + public static BiPredicate mutableAccessKey(Supplier accessKeySupplier) { + return (authorizationHeader, sessionTokenHeader) -> authorizationHeader != null + && authorizationHeader.contains(accessKeySupplier.get()); + } + + public static BiPredicate fixedAccessKeyAndToken(String accessKey, String sessionToken) { + Objects.requireNonNull(sessionToken); + final var accessKeyPredicate = fixedAccessKey(accessKey); + return (authorizationHeader, sessionTokenHeader) -> accessKeyPredicate.test(authorizationHeader, sessionTokenHeader) + && sessionToken.equals(sessionTokenHeader); } } diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java deleted file mode 100644 index 001cc34d9b20d..0000000000000 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixtureWithSessionToken.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package fixture.s3; - -import com.sun.net.httpserver.HttpHandler; - -import org.elasticsearch.rest.RestStatus; - -import static fixture.s3.S3HttpHandler.sendError; - -public class S3HttpFixtureWithSessionToken extends S3HttpFixture { - - protected final String sessionToken; - - public S3HttpFixtureWithSessionToken(String bucket, String basePath, String accessKey, String sessionToken) { - super(true, bucket, basePath, accessKey); - this.sessionToken = sessionToken; - } - - @Override - protected HttpHandler createHandler() { - final HttpHandler delegate = super.createHandler(); - return exchange -> { - final String securityToken = exchange.getRequestHeaders().getFirst("x-amz-security-token"); - if (securityToken == null) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "No session token"); - return; - } - if (securityToken.equals(sessionToken) == false) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad session token"); - return; - } - delegate.handle(exchange); - }; - } -} diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java index 3049fe830e728..989e5468c4fb3 100644 --- a/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/s3/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/s3/S3SearchableSnapshotsCredentialsReloadIT.java @@ -44,7 +44,14 @@ public class S3SearchableSnapshotsCredentialsReloadIT extends 
ESRestTestCase { private static final String BUCKET = "S3SearchableSnapshotsCredentialsReloadIT-bucket"; private static final String BASE_PATH = "S3SearchableSnapshotsCredentialsReloadIT-base-path"; - public static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, "ignored"); + private static volatile String repositoryAccessKey; + + public static final S3HttpFixture s3Fixture = new S3HttpFixture( + true, + BUCKET, + BASE_PATH, + S3HttpFixture.mutableAccessKey(() -> repositoryAccessKey) + ); private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); @@ -78,7 +85,7 @@ public void testReloadCredentialsFromKeystore() throws IOException { // Set up initial credentials final String accessKey1 = randomIdentifier(); - s3Fixture.setAccessKey(accessKey1); + repositoryAccessKey = accessKey1; keystoreSettings.put("s3.client.default.access_key", accessKey1); keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); cluster.updateStoredSecureSettings(); @@ -92,7 +99,7 @@ public void testReloadCredentialsFromKeystore() throws IOException { // Rotate credentials in blob store logger.info("--> rotate credentials"); final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); - s3Fixture.setAccessKey(accessKey2); + repositoryAccessKey = accessKey2; // Ensure searchable snapshot now does not work due to invalid credentials logger.info("--> expect failure"); @@ -118,7 +125,7 @@ public void testReloadCredentialsFromAlternativeClient() throws IOException { final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); final String alternativeClient = randomValueOtherThan("default", ESTestCase::randomIdentifier); - s3Fixture.setAccessKey(accessKey1); + repositoryAccessKey = accessKey1; keystoreSettings.put("s3.client.default.access_key", accessKey1); keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); keystoreSettings.put("s3.client." 
+ alternativeClient + ".access_key", accessKey2); @@ -133,7 +140,7 @@ public void testReloadCredentialsFromAlternativeClient() throws IOException { // Rotate credentials in blob store logger.info("--> rotate credentials"); - s3Fixture.setAccessKey(accessKey2); + repositoryAccessKey = accessKey2; // Ensure searchable snapshot now does not work due to invalid credentials logger.info("--> expect failure"); @@ -157,7 +164,7 @@ public void testReloadCredentialsFromMetadata() throws IOException { final String accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); testHarness.putRepository(b -> b.put("access_key", accessKey1).put("secret_key", randomIdentifier())); - s3Fixture.setAccessKey(accessKey1); + repositoryAccessKey = accessKey1; testHarness.createFrozenSearchableSnapshotIndex(); @@ -166,7 +173,7 @@ public void testReloadCredentialsFromMetadata() throws IOException { // Rotate credentials in blob store logger.info("--> rotate credentials"); - s3Fixture.setAccessKey(accessKey2); + repositoryAccessKey = accessKey2; // Ensure searchable snapshot now does not work due to invalid credentials logger.info("--> expect failure"); @@ -269,7 +276,7 @@ void ensureSearchFailure() throws IOException { assertThat( expectThrows(ResponseException.class, () -> client().performRequest(searchRequest)).getMessage(), allOf( - containsString("Bad access key"), + containsString("Access denied"), containsString("Status Code: 403"), containsString("Error Code: AccessDenied"), containsString("failed to read data from cache") From a860d3ab33cf12bba782924c3fd87c586fe887ad Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 26 Nov 2024 10:48:35 +0100 Subject: [PATCH 246/386] [DOCS] Trivial: remove tech preview badge (#117461) --- docs/reference/intro.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/intro.asciidoc b/docs/reference/intro.asciidoc index 2908c55789bab..e0100b1c5640b 100644 --- a/docs/reference/intro.asciidoc +++ b/docs/reference/intro.asciidoc @@ -85,7 +85,7 @@ You can deploy {es} in various ways. **Hosted options** * {cloud}/ec-getting-started-trial.html[*Elastic Cloud Hosted*]: {es} is available as part of the hosted Elastic Stack offering, deployed in the cloud with your provider of choice. Sign up for a https://cloud.elastic.co/registration[14-day free trial]. -* {serverless-docs}/general/sign-up-trial[*Elastic Cloud Serverless* (technical preview)]: Create serverless projects for autoscaled and fully managed {es} deployments. Sign up for a https://cloud.elastic.co/serverless-registration[14-day free trial]. +* {serverless-docs}/general/sign-up-trial[*Elastic Cloud Serverless*]: Create serverless projects for autoscaled and fully managed {es} deployments. Sign up for a https://cloud.elastic.co/serverless-registration[14-day free trial]. 
**Advanced options** From 5b929d7f415094e1e58609e86ff977b46d71c016 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 26 Nov 2024 12:01:10 +0100 Subject: [PATCH 247/386] Small wording fix in ESIntegTestCase (#117341) --- .../src/main/java/org/elasticsearch/test/ESIntegTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index d7c5c598ce978..af92eae8c8a19 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -281,7 +281,7 @@ public abstract class ESIntegTestCase extends ESTestCase { /** * Annotation for third-party integration tests. *
<p>
    - * These are tests the require a third-party service in order to run. They + * These are tests, which require a third-party service in order to run. They * may require the user to manually configure an external process (such as rabbitmq), * or may additionally require some external configuration (e.g. AWS credentials) * via the {@code tests.config} system property. From 5e028220c91af4a37d6a0abcc9d5b9359ba0eaf3 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 26 Nov 2024 12:06:52 +0100 Subject: [PATCH 248/386] [Docs] Update incremental sync note (#117545) --- docs/reference/connector/docs/connectors-content-syncs.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/connector/docs/connectors-content-syncs.asciidoc b/docs/reference/connector/docs/connectors-content-syncs.asciidoc index f1745382677a2..0a2eb54047170 100644 --- a/docs/reference/connector/docs/connectors-content-syncs.asciidoc +++ b/docs/reference/connector/docs/connectors-content-syncs.asciidoc @@ -52,7 +52,7 @@ However, a fast, accessible third-party data source that stores huge amounts of [NOTE] ==== -Incremental syncs for the SharePoint Online connector use specific logic. +Incremental syncs for <> and <> connectors use specific logic. All other connectors use the same shared connector framework logic for incremental syncs. ==== From 5a749a30d6bed5aaff8f057e6c14f53a75713acd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Tue, 26 Nov 2024 12:42:41 +0100 Subject: [PATCH 249/386] Changelog for default container image change to UBI (#117482) The image has been changed in #116739 --- docs/changelog/116739.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/116739.yaml diff --git a/docs/changelog/116739.yaml b/docs/changelog/116739.yaml new file mode 100644 index 0000000000000..ea3b1253a9008 --- /dev/null +++ b/docs/changelog/116739.yaml @@ -0,0 +1,5 @@ +pr: 116739 +summary: Change default Docker image to be based on UBI minimal instead of Ubuntu +area: Infra/Core +type: enhancement +issues: [] From d7797eed31237104a369b54b16d3dcf56fe56fbc Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 26 Nov 2024 12:50:47 +0100 Subject: [PATCH 250/386] Add a way to log hot threads in plain text (#111053) This adds a way to log current threads in plain text to logs. This way we do not need to decode them and can search by stack trace in logs (for example to know if the issue is recurring). Please note, this produces a multi-line log entry. 
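A minimal usage sketch (the caller, log level, and prefix below are
illustrative; only HotThreads.logLocalCurrentThreads itself is added by this
change):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.Logger;

    import org.elasticsearch.monitor.jvm.HotThreads;

    static void dumpCurrentThreads(Logger logger) {
        // Logs a "Hot threads at ..." header and then each thread's details as
        // separate plain-text log events at the given level, so the stack
        // traces can be searched for directly in the logs.
        HotThreads.logLocalCurrentThreads(logger, Level.WARN, "slow-operation diagnostics");
    }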
--- .../action/admin/HotThreadsIT.java | 23 +++++++ .../elasticsearch/monitor/jvm/HotThreads.java | 60 ++++++++++++++----- .../monitor/jvm/HotThreadsTests.java | 2 +- 3 files changed, 68 insertions(+), 17 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 8c80cee58f46c..76a6717ab1d09 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockLog; import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matcher; @@ -31,6 +32,7 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.MockLog.assertThatLogger; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.equalTo; @@ -211,4 +213,25 @@ public void testLogLocalHotThreads() { ) ); } + + @TestLogging(reason = "testing logging at various levels", value = "org.elasticsearch.action.admin.HotThreadsIT:TRACE") + public void testLogLocalCurrentThreadsInPlainText() { + final var level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); + assertThatLogger( + () -> HotThreads.logLocalCurrentThreads(logger, level, getTestName()), + HotThreadsIT.class, + new MockLog.SeenEventExpectation( + "Should log hot threads header in plain text", + HotThreadsIT.class.getCanonicalName(), + level, + "testLogLocalCurrentThreadsInPlainText: Hot threads at" + ), + new MockLog.SeenEventExpectation( + "Should log hot threads cpu usage in plain text", + HotThreadsIT.class.getCanonicalName(), + level, + "cpu usage by thread" + ) + ); + } } diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index b14ef171ccd1d..8c903fdc634d3 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; +import java.io.StringWriter; import java.io.Writer; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; @@ -105,6 +106,33 @@ public static void logLocalHotThreads(Logger logger, Level level, String prefix, } } + /** + * Capture and log the current threads on the local node. Unlike hot threads this does not sample and captures current state only. + * Useful for capturing stack traces for unexpectedly-slow operations in production. The resulting message might be large, so it is + * split per thread and logged as multiple entries. + * + * @param logger The logger to use for the logging + * @param level The log level to use for the logging. + * @param prefix The prefix to emit on each chunk of the logging. 
+ */ + public static void logLocalCurrentThreads(Logger logger, Level level, String prefix) { + if (logger.isEnabled(level) == false) { + return; + } + + try (var writer = new StringWriter()) { + new HotThreads().busiestThreads(500).threadElementsSnapshotCount(1).detect(writer, () -> { + logger.log(level, "{}: {}", prefix, writer.toString()); + writer.getBuffer().setLength(0); + }); + } catch (Exception e) { + logger.error( + () -> org.elasticsearch.common.Strings.format("failed to write local current threads with prefix [%s]", prefix), + e + ); + } + } + public enum ReportType { CPU("cpu"), @@ -192,11 +220,12 @@ public HotThreads sortOrder(SortOrder order) { } public void detect(Writer writer) throws Exception { + detect(writer, () -> {}); + } + + public void detect(Writer writer, Runnable onNextThread) throws Exception { synchronized (mutex) { - innerDetect(ManagementFactory.getThreadMXBean(), SunThreadInfo.INSTANCE, Thread.currentThread().getId(), (interval) -> { - Thread.sleep(interval); - return null; - }, writer); + innerDetect(ManagementFactory.getThreadMXBean(), SunThreadInfo.INSTANCE, Thread.currentThread().getId(), writer, onNextThread); } } @@ -245,13 +274,15 @@ Map getAllValidThreadInfos(ThreadMXBean threadBean, ThreadInfo[][] captureThreadStacks(ThreadMXBean threadBean, long[] threadIds) throws InterruptedException { ThreadInfo[][] result = new ThreadInfo[threadElementsSnapshotCount][]; - for (int j = 0; j < threadElementsSnapshotCount; j++) { - // NOTE, javadoc of getThreadInfo says: If a thread of the given ID is not alive or does not exist, - // null will be set in the corresponding element in the returned array. A thread is alive if it has - // been started and has not yet died. + + // NOTE, javadoc of getThreadInfo says: If a thread of the given ID is not alive or does not exist, + // null will be set in the corresponding element in the returned array. A thread is alive if it has + // been started and has not yet died. 
+ for (int j = 0; j < threadElementsSnapshotCount - 1; j++) { result[j] = threadBean.getThreadInfo(threadIds, Integer.MAX_VALUE); Thread.sleep(threadElementsSnapshotDelay.millis()); } + result[threadElementsSnapshotCount - 1] = threadBean.getThreadInfo(threadIds, Integer.MAX_VALUE); return result; } @@ -267,13 +298,8 @@ private double getTimeSharePercentage(long time) { return (((double) time) / interval.nanos()) * 100; } - void innerDetect( - ThreadMXBean threadBean, - SunThreadInfo sunThreadInfo, - long currentThreadId, - SleepFunction threadSleep, - Writer writer - ) throws Exception { + void innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long currentThreadId, Writer writer, Runnable onNextThread) + throws Exception { if (threadBean.isThreadCpuTimeSupported() == false) { throw new ElasticsearchException("thread CPU time is not supported on this JDK"); } @@ -297,10 +323,11 @@ void innerDetect( .append(", ignoreIdleThreads=") .append(Boolean.toString(ignoreIdleThreads)) .append(":\n"); + onNextThread.run(); // Capture before and after thread state with timings Map previousThreadInfos = getAllValidThreadInfos(threadBean, sunThreadInfo, currentThreadId); - threadSleep.apply(interval.millis()); + Thread.sleep(interval.millis()); Map latestThreadInfos = getAllValidThreadInfos(threadBean, sunThreadInfo, currentThreadId); latestThreadInfos.forEach((threadId, accumulator) -> accumulator.subtractPrevious(previousThreadInfos.get(threadId))); @@ -430,6 +457,7 @@ void innerDetect( } } } + onNextThread.run(); } } diff --git a/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java b/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java index 93c40185f62ac..37eb69c0ca409 100644 --- a/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java @@ -947,7 +947,7 @@ private static String innerDetect( long currentThreadId ) throws Exception { try (var writer = new StringWriter()) { - hotThreads.innerDetect(mockedMthreadMXBeanBean, sunThreadInfo, currentThreadId, (interval) -> null, writer); + hotThreads.innerDetect(mockedMthreadMXBeanBean, sunThreadInfo, currentThreadId, writer, () -> {}); return writer.toString(); } } From a245e709ba5a94ad7a476a84d43f0b04bd361fc4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Nov 2024 23:02:11 +1100 Subject: [PATCH 251/386] Mute org.elasticsearch.xpack.esql.qa.mixed.FieldExtractorIT testConstantKeywordField #117531 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 37f36e9a19340..b3c34505e6561 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -240,6 +240,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117524 - class: org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/117525 +- class: org.elasticsearch.xpack.esql.qa.mixed.FieldExtractorIT + method: testConstantKeywordField + issue: https://github.com/elastic/elasticsearch/issues/117531 # Examples: # From 5e16bc3fa615d76a5f188e0b722691da2981e633 Mon Sep 17 00:00:00 2001 From: Alexey Ivanov Date: Tue, 26 Nov 2024 12:49:33 +0000 Subject: [PATCH 252/386] [CI] FileSettingsServiceIT testErrorCanRecoverOnRestart failing (#116895) (#117511) Fixes flaky test FileSettingsServiceIT.testErrorCanRecoverOnRestart Fixes #116895 --- 
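The likely mechanism: after the full cluster restart the master's
FileSettingsService starts watching asynchronously, so asserting watching()
immediately can race with startup and fail intermittently. Retrying the check
makes it robust:

    // retried by ESTestCase.assertBusy until it passes or the timeout elapses
    assertBusy(() -> assertTrue(masterFileSettingsService.watching()));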
.../reservedstate/service/FileSettingsServiceIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index 90326abb381d0..85f0e2cf7e3ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -398,7 +398,7 @@ public void testErrorCanRecoverOnRestart() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); assertFalse(dataFileSettingsService.watching()); writeJSONFile(masterNode, testErrorJSON, logger, versionCounter.incrementAndGet()); @@ -434,7 +434,7 @@ public void testNewErrorOnRestartReprocessing() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); assertFalse(dataFileSettingsService.watching()); writeJSONFile(masterNode, testErrorJSON, logger, versionCounter.incrementAndGet()); From 1495c550ad05af55acec47ca1445b5faeb86d4e8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 27 Nov 2024 00:54:46 +1100 Subject: [PATCH 253/386] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} #116777 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b3c34505e6561..49898308e411b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,6 +243,9 @@ tests: - class: org.elasticsearch.xpack.esql.qa.mixed.FieldExtractorIT method: testConstantKeywordField issue: https://github.com/elastic/elasticsearch/issues/117531 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} + issue: https://github.com/elastic/elasticsearch/issues/116777 # Examples: # From 2bc1b4f6062c33a259b4aa0df9a7118bbfc4dc2e Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 26 Nov 2024 13:58:54 +0000 Subject: [PATCH 254/386] Make `PutStoredScriptRequest` immutable (#117556) No need for this request to be mutable, we always know all the values at creation time. Also adjusts the `toString()` impl to use the `source` field, since this is the only spot that we use the `content` so with this change we can follow up with a 9.x-only change to remove it. 
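At the call sites the change looks like this (both forms appear in the test
updates below; newPutStoredScriptTestRequest is the shared helper these tests
now go through):

    // before: built empty, then mutated through setters
    new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
        .id(id)
        .content(new BytesArray(jsonContent), XContentType.JSON)

    // after: every value is known up front, so the fields can be final
    newPutStoredScriptTestRequest(id, jsonContent)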
--- .../script/mustache/SearchTemplateIT.java | 11 +-- .../elasticsearch/script/StoredScriptsIT.java | 26 ++----- .../storedscripts/PutStoredScriptRequest.java | 78 ++++++------------- .../PutStoredScriptRequestTests.java | 12 ++- .../StoredScriptIntegTestUtils.java | 22 ++++-- .../integration/DlsFlsRequestCacheTests.java | 17 +--- 6 files changed, 60 insertions(+), 106 deletions(-) diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index defd20b64762b..cc0b0122e9cce 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -13,12 +13,10 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptType; @@ -39,6 +37,7 @@ import java.util.Map; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.action.admin.cluster.storedscripts.StoredScriptIntegTestUtils.newPutStoredScriptTestRequest; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -467,12 +466,6 @@ public static void assertHitCount(SearchTemplateRequestBuilder requestBuilder, l } private void putJsonStoredScript(String id, String jsonContent) { - assertAcked( - safeExecute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id(id) - .content(new BytesArray(jsonContent), XContentType.JSON) - ) - ); + assertAcked(safeExecute(TransportPutStoredScriptAction.TYPE, newPutStoredScriptTestRequest(id, jsonContent))); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java index e9efab5934e52..76ea5b99a2a6b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/script/StoredScriptsIT.java @@ -11,16 +11,13 @@ import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import 
org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentType; import java.util.Arrays; import java.util.Collection; @@ -28,6 +25,7 @@ import java.util.Map; import java.util.function.Function; +import static org.elasticsearch.action.admin.cluster.storedscripts.StoredScriptIntegTestUtils.newPutStoredScriptTestRequest; import static org.elasticsearch.action.admin.cluster.storedscripts.StoredScriptIntegTestUtils.putJsonStoredScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -73,14 +71,9 @@ public void testBasics() { safeAwaitAndUnwrapFailure( IllegalArgumentException.class, AcknowledgedResponse.class, - l -> client().execute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("id#") - .content(new BytesArray(Strings.format(""" - {"script": {"lang": "%s", "source": "1"} } - """, LANG)), XContentType.JSON), - l - ) + l -> client().execute(TransportPutStoredScriptAction.TYPE, newPutStoredScriptTestRequest("id#", Strings.format(""" + {"script": {"lang": "%s", "source": "1"} } + """, LANG)), l) ).getMessage() ); } @@ -91,14 +84,9 @@ public void testMaxScriptSize() { safeAwaitAndUnwrapFailure( IllegalArgumentException.class, AcknowledgedResponse.class, - l -> client().execute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("foobar") - .content(new BytesArray(Strings.format(""" - {"script": { "lang": "%s", "source":"0123456789abcdef"} }\ - """, LANG)), XContentType.JSON), - l - ) + l -> client().execute(TransportPutStoredScriptAction.TYPE, newPutStoredScriptTestRequest("foobar", Strings.format(""" + {"script": { "lang": "%s", "source":"0123456789abcdef"} }\ + """, LANG)), l) ).getMessage() ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java index 35e46d3f2a4da..8e453cd5bac3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java @@ -11,10 +11,12 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.script.StoredScriptSource; import org.elasticsearch.xcontent.ToXContentFragment; @@ -28,11 +30,15 @@ public class PutStoredScriptRequest extends AcknowledgedRequest implements ToXContentFragment { - private String id; - private String context; - private BytesReference content; - private XContentType xContentType; - private StoredScriptSource source; + @Nullable + private final String id; + + 
@Nullable + private final String context; + + private final BytesReference content; + private final XContentType xContentType; + private final StoredScriptSource source; public PutStoredScriptRequest(StreamInput in) throws IOException { super(in); @@ -43,15 +49,11 @@ public PutStoredScriptRequest(StreamInput in) throws IOException { source = new StoredScriptSource(in); } - public PutStoredScriptRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) { - super(masterNodeTimeout, ackTimeout); - } - public PutStoredScriptRequest( TimeValue masterNodeTimeout, TimeValue ackTimeout, - String id, - String context, + @Nullable String id, + @Nullable String context, BytesReference content, XContentType xContentType, StoredScriptSource source @@ -59,9 +61,9 @@ public PutStoredScriptRequest( super(masterNodeTimeout, ackTimeout); this.id = id; this.context = context; - this.content = content; + this.content = Objects.requireNonNull(content); this.xContentType = Objects.requireNonNull(xContentType); - this.source = source; + this.source = Objects.requireNonNull(source); } @Override @@ -74,10 +76,6 @@ public ActionRequestValidationException validate() { validationException = addValidationError("id cannot contain '#' for stored script", validationException); } - if (content == null) { - validationException = addValidationError("must specify code for stored script", validationException); - } - return validationException; } @@ -85,20 +83,10 @@ public String id() { return id; } - public PutStoredScriptRequest id(String id) { - this.id = id; - return this; - } - public String context() { return context; } - public PutStoredScriptRequest context(String context) { - this.context = context; - return this; - } - public BytesReference content() { return content; } @@ -111,16 +99,6 @@ public StoredScriptSource source() { return source; } - /** - * Set the script source and the content type of the bytes. - */ - public PutStoredScriptRequest content(BytesReference content, XContentType xContentType) { - this.content = content; - this.xContentType = Objects.requireNonNull(xContentType); - this.source = StoredScriptSource.parse(content, xContentType); - return this; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -133,28 +111,16 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { - String source = "_na_"; - - try { - source = XContentHelper.convertToJson(content, false, xContentType); - } catch (Exception e) { - // ignore - } - - return "put stored script {id [" - + id - + "]" - + (context != null ? ", context [" + context + "]" : "") - + ", content [" - + source - + "]}"; + return Strings.format( + "put stored script {id [%s]%s, content [%s]}", + id, + context != null ? 
", context [" + context + "]" : "", + source + ); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("script"); - source.toXContent(builder, params); - - return builder; + return builder.field("script", source, params); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java index ffdd588764699..023e7693f8a47 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java @@ -57,9 +57,15 @@ public void testToXContent() throws IOException { BytesReference expectedRequestBody = BytesReference.bytes(builder); - PutStoredScriptRequest request = new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT); - request.id("test1"); - request.content(expectedRequestBody, xContentType); + PutStoredScriptRequest request = new PutStoredScriptRequest( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + "test1", + null, + expectedRequestBody, + xContentType, + StoredScriptSource.parse(expectedRequestBody, xContentType) + ); XContentBuilder requestBuilder = XContentBuilder.builder(xContentType.xContent()); requestBuilder.startObject(); diff --git a/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/StoredScriptIntegTestUtils.java b/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/StoredScriptIntegTestUtils.java index 5f979d75ec382..0a090af431dae 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/StoredScriptIntegTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/StoredScriptIntegTestUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.script.StoredScriptSource; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentType; @@ -25,11 +26,22 @@ public static void putJsonStoredScript(String id, String jsonContent) { } public static void putJsonStoredScript(String id, BytesReference jsonContent) { - assertAcked( - ESIntegTestCase.safeExecute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id(id).content(jsonContent, XContentType.JSON) - ) + assertAcked(ESIntegTestCase.safeExecute(TransportPutStoredScriptAction.TYPE, newPutStoredScriptTestRequest(id, jsonContent))); + } + + public static PutStoredScriptRequest newPutStoredScriptTestRequest(String id, String jsonContent) { + return newPutStoredScriptTestRequest(id, new BytesArray(jsonContent)); + } + + public static PutStoredScriptRequest newPutStoredScriptTestRequest(String id, BytesReference jsonContent) { + return new PutStoredScriptRequest( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + id, + null, + jsonContent, + XContentType.JSON, + StoredScriptSource.parse(jsonContent, XContentType.JSON) ); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index a5f827c2a4b53..82a10f21debfb 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -8,13 +8,11 @@ package org.elasticsearch.integration; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; @@ -24,7 +22,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecuritySingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequest; @@ -43,6 +40,7 @@ import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; +import static org.elasticsearch.action.admin.cluster.storedscripts.StoredScriptIntegTestUtils.newPutStoredScriptTestRequest; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; @@ -350,17 +348,8 @@ public void testRequestCacheWithTemplateRoleQuery() { private void prepareIndices() { final Client client = client(); - assertAcked( - safeExecute( - TransportPutStoredScriptAction.TYPE, - new PutStoredScriptRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).id("my-script") - .content( - new BytesArray(""" - {"script":{"source":"{\\"match\\":{\\"username\\":\\"{{_user.username}}\\"}}","lang":"mustache"}}"""), - XContentType.JSON - ) - ) - ); + assertAcked(safeExecute(TransportPutStoredScriptAction.TYPE, newPutStoredScriptTestRequest("my-script", """ + {"script":{"source":"{\\"match\\":{\\"username\\":\\"{{_user.username}}\\"}}","lang":"mustache"}}"""))); assertAcked(indicesAdmin().prepareCreate(DLS_INDEX).addAlias(new Alias("dls-alias")).get()); client.prepareIndex(DLS_INDEX).setId("101").setSource("number", 101, "letter", "A").get();
From e9f899ee6913fe00dc8ef7a4254c76e8dca31b47 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 26 Nov 2024 16:44:15 +0100 Subject: [PATCH 255/386] Add current node weight as an APM metric (#117557) As discussed previously, the current node weight (calculated the same way as in the desired balance computations) might also be useful to have as a metric. The difference is that the current node weight is calculated based on the current cluster state rather than the internal state of the BalancedShardsAllocator (i.e. Balancer and ModelNode). To share all the weight calculation logic, I had to move the weight function and a few related utilities out. NodeAllocationStatsProvider is still shared by both the AllocationStatsService and the desired balance metric collection.
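In rough terms, the shared per-node weight is the node's deviation from the cluster-wide averages. A simplified, self-contained sketch of WeightFunction#nodeWeight from the diff below (there the factors are named theta0/theta2/theta3; the names used here are illustrative):

    // Simplified sketch: thetaShard/thetaWrite/thetaDisk are the normalized balance
    // factors. Each term measures deviation from the average node, so a perfectly
    // balanced node scores ~0 regardless of absolute cluster size.
    record NodeWeightSketch(float thetaShard, float thetaWrite, float thetaDisk) {
        float nodeWeight(int shards, float avgShards, double writeLoad, double avgWriteLoad, double diskBytes, double avgDiskBytes) {
            final float shardTerm = shards - avgShards;
            final float ingestTerm = (float) (writeLoad - avgWriteLoad);
            final float diskTerm = (float) (diskBytes - avgDiskBytes);
            return thetaShard * shardTerm + thetaWrite * ingestTerm + thetaDisk * diskTerm;
        }
    }

For the new metric these inputs come from the live RoutingNodes and ClusterInfo rather than the allocator's internal ModelNode bookkeeping.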
Relates ES-10080 --- .../DesiredBalanceReconcilerMetricsIT.java | 10 ++ .../elasticsearch/cluster/ClusterModule.java | 2 +- .../allocation/AllocationStatsService.java | 23 ++- .../NodeAllocationStatsProvider.java | 61 ++++++- .../allocator/BalancedShardsAllocator.java | 136 ++------------- .../allocation/allocator/DesiredBalance.java | 2 +- .../allocator/DesiredBalanceMetrics.java | 26 ++- .../allocator/DesiredBalanceReconciler.java | 11 +- .../allocation/allocator/WeightFunction.java | 157 ++++++++++++++++++ .../AllocationStatsServiceTests.java | 6 +- .../BalancedShardsAllocatorTests.java | 2 +- .../cluster/ESAllocationTestCase.java | 10 +- 12 files changed, 297 insertions(+), 149 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/WeightFunction.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java index b3ec4a5331180..355427c4e059b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerMetricsIT.java @@ -117,6 +117,15 @@ public void testDesiredBalanceMetrics() { assertThat((String) nodeStat.attributes().get("node_id"), is(in(nodeIds))); assertThat((String) nodeStat.attributes().get("node_name"), is(in(nodeNames))); } + final var currentNodeWeightsMetrics = telemetryPlugin.getDoubleGaugeMeasurement( + DesiredBalanceMetrics.CURRENT_NODE_WEIGHT_METRIC_NAME + ); + assertThat(currentNodeWeightsMetrics.size(), equalTo(2)); + for (var nodeStat : currentNodeWeightsMetrics) { + assertTrue(nodeStat.isDouble()); + assertThat((String) nodeStat.attributes().get("node_id"), is(in(nodeIds))); + assertThat((String) nodeStat.attributes().get("node_name"), is(in(nodeNames))); + } final var currentNodeShardCountMetrics = telemetryPlugin.getLongGaugeMeasurement( DesiredBalanceMetrics.CURRENT_NODE_SHARD_COUNT_METRIC_NAME ); @@ -196,6 +205,7 @@ private static void assertMetricsAreBeingPublished(String nodeName, boolean shou testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME), matcher ); + assertThat(testTelemetryPlugin.getDoubleGaugeMeasurement(DesiredBalanceMetrics.CURRENT_NODE_WEIGHT_METRIC_NAME), matcher); assertThat(testTelemetryPlugin.getDoubleGaugeMeasurement(DesiredBalanceMetrics.CURRENT_NODE_WRITE_LOAD_METRIC_NAME), matcher); assertThat(testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.CURRENT_NODE_DISK_USAGE_METRIC_NAME), matcher); assertThat(testTelemetryPlugin.getLongGaugeMeasurement(DesiredBalanceMetrics.CURRENT_NODE_SHARD_COUNT_METRIC_NAME), matcher); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 046f4b6b0b251..c2da33f8f4135 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -139,7 +139,7 @@ public ClusterModule( this.clusterPlugins = clusterPlugins; this.deciderList = createAllocationDeciders(settings, clusterService.getClusterSettings(), clusterPlugins); this.allocationDeciders = new AllocationDeciders(deciderList); - var 
nodeAllocationStatsProvider = new NodeAllocationStatsProvider(writeLoadForecaster); + var nodeAllocationStatsProvider = new NodeAllocationStatsProvider(writeLoadForecaster, clusterService.getClusterSettings()); this.shardsAllocator = createShardsAllocator( settings, clusterService.getClusterSettings(), diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsService.java index 0c82faaaeaa45..b98e9050d2b4a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsService.java @@ -17,6 +17,7 @@ import java.util.Map; import java.util.function.Supplier; +import java.util.stream.Collectors; public class AllocationStatsService { private final ClusterService clusterService; @@ -39,6 +40,26 @@ public AllocationStatsService( } public Map stats() { - return nodeAllocationStatsProvider.stats(clusterService.state(), clusterInfoService.getClusterInfo(), desiredBalanceSupplier.get()); + var state = clusterService.state(); + var stats = nodeAllocationStatsProvider.stats( + state.metadata(), + state.getRoutingNodes(), + clusterInfoService.getClusterInfo(), + desiredBalanceSupplier.get() + ); + return stats.entrySet() + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> new NodeAllocationStats( + e.getValue().shards(), + e.getValue().undesiredShards(), + e.getValue().forecastedIngestLoad(), + e.getValue().forecastedDiskUsage(), + e.getValue().currentDiskUsage() + ) + ) + ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsProvider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsProvider.java index 157b409be14d3..8368f5916ef91 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsProvider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsProvider.java @@ -10,11 +10,15 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.cluster.ClusterInfo; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalance; +import org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Nullable; @@ -23,17 +27,47 @@ public class NodeAllocationStatsProvider { private final WriteLoadForecaster writeLoadForecaster; - public NodeAllocationStatsProvider(WriteLoadForecaster writeLoadForecaster) { + private volatile float indexBalanceFactor; + private volatile float shardBalanceFactor; + private volatile float writeLoadBalanceFactor; + private volatile float diskUsageBalanceFactor; + + public record NodeAllocationAndClusterBalanceStats( + int shards, + int undesiredShards, + double forecastedIngestLoad, + long forecastedDiskUsage, + long currentDiskUsage, + float 
currentNodeWeight + ) {} + + public NodeAllocationStatsProvider(WriteLoadForecaster writeLoadForecaster, ClusterSettings clusterSettings) { this.writeLoadForecaster = writeLoadForecaster; + clusterSettings.initializeAndWatch(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, value -> this.shardBalanceFactor = value); + clusterSettings.initializeAndWatch(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, value -> this.indexBalanceFactor = value); + clusterSettings.initializeAndWatch( + BalancedShardsAllocator.WRITE_LOAD_BALANCE_FACTOR_SETTING, + value -> this.writeLoadBalanceFactor = value + ); + clusterSettings.initializeAndWatch( + BalancedShardsAllocator.DISK_USAGE_BALANCE_FACTOR_SETTING, + value -> this.diskUsageBalanceFactor = value + ); } - public Map stats( - ClusterState clusterState, + public Map stats( + Metadata metadata, + RoutingNodes routingNodes, ClusterInfo clusterInfo, @Nullable DesiredBalance desiredBalance ) { - var stats = Maps.newMapWithExpectedSize(clusterState.getRoutingNodes().size()); - for (RoutingNode node : clusterState.getRoutingNodes()) { + var weightFunction = new WeightFunction(shardBalanceFactor, indexBalanceFactor, writeLoadBalanceFactor, diskUsageBalanceFactor); + var avgShardsPerNode = WeightFunction.avgShardPerNode(metadata, routingNodes); + var avgWriteLoadPerNode = WeightFunction.avgWriteLoadPerNode(writeLoadForecaster, metadata, routingNodes); + var avgDiskUsageInBytesPerNode = WeightFunction.avgDiskUsageInBytesPerNode(clusterInfo, metadata, routingNodes); + + var stats = Maps.newMapWithExpectedSize(routingNodes.size()); + for (RoutingNode node : routingNodes) { int shards = 0; int undesiredShards = 0; double forecastedWriteLoad = 0.0; @@ -44,7 +78,7 @@ public Map stats( continue; } shards++; - IndexMetadata indexMetadata = clusterState.metadata().getIndexSafe(shardRouting.index()); + IndexMetadata indexMetadata = metadata.getIndexSafe(shardRouting.index()); if (isDesiredAllocation(desiredBalance, shardRouting) == false) { undesiredShards++; } @@ -54,14 +88,23 @@ public Map stats( currentDiskUsage += shardSize; } + float currentNodeWeight = weightFunction.nodeWeight( + shards, + avgShardsPerNode, + forecastedWriteLoad, + avgWriteLoadPerNode, + currentDiskUsage, + avgDiskUsageInBytesPerNode + ); stats.put( node.nodeId(), - new NodeAllocationStats( + new NodeAllocationAndClusterBalanceStats( shards, desiredBalance != null ? 
undesiredShards : -1, forecastedWriteLoad, forecastedDiskUsage, - currentDiskUsage + currentDiskUsage, + currentNodeWeight ) ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 5b8fb0c7e9203..8dd1f14564ce9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -168,14 +168,17 @@ private void collectAndRecordNodeWeightStats(Balancer balancer, WeightFunction w Map nodeLevelWeights = new HashMap<>(); for (var entry : balancer.nodes.entrySet()) { var node = entry.getValue(); + var nodeWeight = weightFunction.nodeWeight( + node.numShards(), + balancer.avgShardsPerNode(), + node.writeLoad(), + balancer.avgWriteLoadPerNode(), + node.diskUsageInBytes(), + balancer.avgDiskUsageInBytesPerNode() + ); nodeLevelWeights.put( node.routingNode.node(), - new DesiredBalanceMetrics.NodeWeightStats( - node.numShards(), - node.diskUsageInBytes(), - node.writeLoad(), - weightFunction.nodeWeight(balancer, node) - ) + new DesiredBalanceMetrics.NodeWeightStats(node.numShards(), node.diskUsageInBytes(), node.writeLoad(), nodeWeight) ); } allocation.routingNodes().setBalanceWeightStatsPerNode(nodeLevelWeights); @@ -252,65 +255,6 @@ public float getShardBalance() { return shardBalanceFactor; } - /** - * This class is the primary weight function used to create balanced over nodes and shards in the cluster. - * Currently this function has 3 properties: - *
<ul>
- * <li><code>index balance</code> - balance property over shards per index</li>
- * <li><code>shard balance</code> - balance property over shards per cluster</li>
- * </ul>
- * <p>
    - * Each of these properties are expressed as factor such that the properties factor defines the relative - * importance of the property for the weight function. For example if the weight function should calculate - * the weights only based on a global (shard) balance the index balance can be set to {@code 0.0} and will - * in turn have no effect on the distribution. - *
<p>
- * The weight per index is calculated based on the following formula:
- * <ul>
- * <li>
- * <code>weight<sub>index</sub>(node, index) = indexBalance * (node.numShards(index) - avgShardsPerNode(index))</code>
- * </li>
- * <li>
- * <code>weight<sub>node</sub>(node, index) = shardBalance * (node.numShards() - avgShardsPerNode)</code>
- * </li>
- * </ul>
- * <code>weight(node, index) = weight<sub>index</sub>(node, index) + weight<sub>node</sub>(node, index)</code>
- */
- private static class WeightFunction { - - private final float theta0; - private final float theta1; - private final float theta2; - private final float theta3; - - WeightFunction(float shardBalance, float indexBalance, float writeLoadBalance, float diskUsageBalance) { - float sum = shardBalance + indexBalance + writeLoadBalance + diskUsageBalance; - if (sum <= 0.0f) { - throw new IllegalArgumentException("Balance factors must sum to a value > 0 but was: " + sum); - } - theta0 = shardBalance / sum; - theta1 = indexBalance / sum; - theta2 = writeLoadBalance / sum; - theta3 = diskUsageBalance / sum; - } - - float weight(Balancer balancer, ModelNode node, String index) { - final float weightIndex = node.numShards(index) - balancer.avgShardsPerNode(index); - return nodeWeight(balancer, node) + theta1 * weightIndex; - } - - float nodeWeight(Balancer balancer, ModelNode node) { - final float weightShard = node.numShards() - balancer.avgShardsPerNode(); - final float ingestLoad = (float) (node.writeLoad() - balancer.avgWriteLoadPerNode()); - final float diskUsage = (float) (node.diskUsageInBytes() - balancer.avgDiskUsageInBytesPerNode()); - return theta0 * weightShard + theta2 * ingestLoad + theta3 * diskUsage; - } - - float minWeightDelta(Balancer balancer, String index) { - return theta0 * 1 + theta1 * 1 + theta2 * balancer.getShardWriteLoad(index) + theta3 * balancer.maxShardSizeBytes(index); - } - } - /** * A {@link Balancer} */ @@ -335,63 +279,13 @@ private Balancer(WriteLoadForecaster writeLoadForecaster, RoutingAllocation allo this.metadata = allocation.metadata(); this.weight = weight; this.threshold = threshold; - avgShardsPerNode = ((float) metadata.getTotalNumberOfShards()) / routingNodes.size(); - avgWriteLoadPerNode = getTotalWriteLoad(writeLoadForecaster, metadata) / routingNodes.size(); - avgDiskUsageInBytesPerNode = ((double) getTotalDiskUsageInBytes(allocation.clusterInfo(), metadata) / routingNodes.size()); + avgShardsPerNode = WeightFunction.avgShardPerNode(metadata, routingNodes); + avgWriteLoadPerNode = WeightFunction.avgWriteLoadPerNode(writeLoadForecaster, metadata, routingNodes); + avgDiskUsageInBytesPerNode = WeightFunction.avgDiskUsageInBytesPerNode(allocation.clusterInfo(), metadata, routingNodes); nodes = Collections.unmodifiableMap(buildModelFromAssigned()); sorter = newNodeSorter(); } - private static double getTotalWriteLoad(WriteLoadForecaster writeLoadForecaster, Metadata metadata) { - double writeLoad = 0.0; - for (IndexMetadata indexMetadata : metadata.indices().values()) { - writeLoad += getIndexWriteLoad(writeLoadForecaster, indexMetadata); - } - return writeLoad; - } - - private static double getIndexWriteLoad(WriteLoadForecaster writeLoadForecaster, IndexMetadata indexMetadata) { - var shardWriteLoad = writeLoadForecaster.getForecastedWriteLoad(indexMetadata).orElse(0.0); - return shardWriteLoad * numberOfCopies(indexMetadata); - } - - private static long getTotalDiskUsageInBytes(ClusterInfo clusterInfo, Metadata metadata) { - long totalDiskUsageInBytes = 0; - for (IndexMetadata indexMetadata : metadata.indices().values()) { - totalDiskUsageInBytes += getIndexDiskUsageInBytes(clusterInfo, indexMetadata); - } - return totalDiskUsageInBytes; - } - - // Visible for testing - static long getIndexDiskUsageInBytes(ClusterInfo clusterInfo, IndexMetadata indexMetadata) { - if (indexMetadata.ignoreDiskWatermarks()) { - // disk watermarks are ignored for partial searchable snapshots
- // and is equivalent to indexMetadata.isPartialSearchableSnapshot() - return 0; - } - final long forecastedShardSize = indexMetadata.getForecastedShardSizeInBytes().orElse(-1L); - long totalSizeInBytes = 0; - int shardCount = 0; - for (int shard = 0; shard < indexMetadata.getNumberOfShards(); shard++) { - final ShardId shardId = new ShardId(indexMetadata.getIndex(), shard); - final long primaryShardSize = Math.max(forecastedShardSize, clusterInfo.getShardSize(shardId, true, -1L)); - if (primaryShardSize != -1L) { - totalSizeInBytes += primaryShardSize; - shardCount++; - } - final long replicaShardSize = Math.max(forecastedShardSize, clusterInfo.getShardSize(shardId, false, -1L)); - if (replicaShardSize != -1L) { - totalSizeInBytes += replicaShardSize * indexMetadata.getNumberOfReplicas(); - shardCount += indexMetadata.getNumberOfReplicas(); - } - } - if (shardCount == numberOfCopies(indexMetadata)) { - return totalSizeInBytes; - } - return shardCount == 0 ? 0 : (totalSizeInBytes / shardCount) * numberOfCopies(indexMetadata); - } - private static long getShardDiskUsageInBytes(ShardRouting shardRouting, IndexMetadata indexMetadata, ClusterInfo clusterInfo) { if (indexMetadata.ignoreDiskWatermarks()) { // disk watermarks are ignored for partial searchable snapshots @@ -401,10 +295,6 @@ private static long getShardDiskUsageInBytes(ShardRouting shardRouting, IndexMet return Math.max(indexMetadata.getForecastedShardSizeInBytes().orElse(0L), clusterInfo.getShardSize(shardRouting, 0L)); } - private static int numberOfCopies(IndexMetadata indexMetadata) { - return indexMetadata.getNumberOfShards() * (1 + indexMetadata.getNumberOfReplicas()); - } - private float getShardWriteLoad(String index) { return (float) writeLoadForecaster.getForecastedWriteLoad(metadata.index(index)).orElse(0.0); } @@ -1433,7 +1323,7 @@ public float weight(ModelNode node) { } public float minWeightDelta() { - return function.minWeightDelta(balancer, index); + return function.minWeightDelta(balancer.getShardWriteLoad(index), balancer.maxShardSizeBytes(index)); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index 9de95804b49b2..6ad44fdf3a9c0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -21,7 +21,7 @@ * * @param assignments a set of the (persistent) node IDs to which each {@link ShardId} should be allocated * @param weightsPerNode The node weights calculated based on - * {@link org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.WeightFunction#nodeWeight} + * {@link org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction#nodeWeight} */ public record DesiredBalance( long lastConvergedIndex, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java index cf8840dc95724..9f6487bdc8abd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceMetrics.java @@ -10,7 +10,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator; import 
org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; +import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsProvider.NodeAllocationAndClusterBalanceStats; import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -41,6 +41,7 @@ public record NodeWeightStats(long shardCount, double diskUsageInBytes, double w public static final String DESIRED_BALANCE_NODE_DISK_USAGE_METRIC_NAME = "es.allocator.desired_balance.allocations.node_disk_usage_bytes.current"; + public static final String CURRENT_NODE_WEIGHT_METRIC_NAME = "es.allocator.allocations.node.weight.current"; public static final String CURRENT_NODE_SHARD_COUNT_METRIC_NAME = "es.allocator.allocations.node.shard_count.current"; public static final String CURRENT_NODE_WRITE_LOAD_METRIC_NAME = "es.allocator.allocations.node.write_load.current"; public static final String CURRENT_NODE_DISK_USAGE_METRIC_NAME = "es.allocator.allocations.node.disk_usage_bytes.current"; @@ -68,12 +69,13 @@ public record NodeWeightStats(long shardCount, double diskUsageInBytes, double w private volatile long undesiredAllocations; private final AtomicReference> weightStatsPerNodeRef = new AtomicReference<>(Map.of()); - private final AtomicReference> allocationStatsPerNodeRef = new AtomicReference<>(Map.of()); + private final AtomicReference> allocationStatsPerNodeRef = + new AtomicReference<>(Map.of()); public void updateMetrics( AllocationStats allocationStats, Map weightStatsPerNode, - Map nodeAllocationStats + Map nodeAllocationStats ) { assert allocationStats != null : "allocation stats cannot be null"; assert weightStatsPerNode != null : "node balance weight stats cannot be null"; @@ -124,6 +126,12 @@ public DesiredBalanceMetrics(MeterRegistry meterRegistry) { "bytes", this::getDesiredBalanceNodeDiskUsageMetrics ); + meterRegistry.registerDoublesGauge( + CURRENT_NODE_WEIGHT_METRIC_NAME, + "The weight of nodes based on the current allocation state", + "unit", + this::getCurrentNodeWeightMetrics + ); meterRegistry.registerLongsGauge( DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME, "Shard count of nodes in the computed desired balance", @@ -291,6 +299,18 @@ private List getCurrentNodeUndesiredShardCountMetrics() { return values; } + private List getCurrentNodeWeightMetrics() { + if (nodeIsMaster == false) { + return List.of(); + } + var stats = allocationStatsPerNodeRef.get(); + List doubles = new ArrayList<>(stats.size()); + for (var node : stats.keySet()) { + doubles.add(new DoubleWithAttributes(stats.get(node).currentNodeWeight(), getNodeAttributes(node))); + } + return doubles; + } + private Map getNodeAttributes(DiscoveryNode node) { return Map.of("node_id", node.getId(), "node_name", node.getName()); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 5ad29debc8f20..2ee905634f760 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -20,8 +20,8 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import 
org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; -import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsProvider; +import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsProvider.NodeAllocationAndClusterBalanceStats; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceMetrics.AllocationStats; import org.elasticsearch.cluster.routing.allocation.decider.Decision; @@ -159,8 +159,13 @@ void run() { } private void updateDesireBalanceMetrics(AllocationStats allocationStats) { - var stats = nodeAllocationStatsProvider.stats(allocation.getClusterState(), allocation.clusterInfo(), desiredBalance); - Map nodeAllocationStats = new HashMap<>(stats.size()); + var stats = nodeAllocationStatsProvider.stats( + allocation.metadata(), + allocation.routingNodes(), + allocation.clusterInfo(), + desiredBalance + ); + Map nodeAllocationStats = new HashMap<>(stats.size()); for (var entry : stats.entrySet()) { var node = allocation.nodes().get(entry.getKey()); if (node != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/WeightFunction.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/WeightFunction.java new file mode 100644 index 0000000000000..7203a92b147f6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/WeightFunction.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.cluster.routing.allocation.allocator; + +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; +import org.elasticsearch.index.shard.ShardId; + +/** + * This class is the primary weight function used to create balanced over nodes and shards in the cluster. + * Currently this function has 3 properties: + *
<ul>
+ * <li><code>index balance</code> - balance property over shards per index</li>
+ * <li><code>shard balance</code> - balance property over shards per cluster</li>
+ * </ul>
+ * <p>
    + * Each of these properties are expressed as factor such that the properties factor defines the relative + * importance of the property for the weight function. For example if the weight function should calculate + * the weights only based on a global (shard) balance the index balance can be set to {@code 0.0} and will + * in turn have no effect on the distribution. + *
<p>
+ * The weight per index is calculated based on the following formula:
+ * <ul>
+ * <li>
+ * <code>weight<sub>index</sub>(node, index) = indexBalance * (node.numShards(index) - avgShardsPerNode(index))</code>
+ * </li>
+ * <li>
+ * <code>weight<sub>node</sub>(node, index) = shardBalance * (node.numShards() - avgShardsPerNode)</code>
+ * </li>
+ * </ul>
+ * <code>weight(node, index) = weight<sub>index</sub>(node, index) + weight<sub>node</sub>(node, index)</code>
+ */
+public class WeightFunction { + + private final float theta0; + private final float theta1; + private final float theta2; + private final float theta3; + + public WeightFunction(float shardBalance, float indexBalance, float writeLoadBalance, float diskUsageBalance) { + float sum = shardBalance + indexBalance + writeLoadBalance + diskUsageBalance; + if (sum <= 0.0f) { + throw new IllegalArgumentException("Balance factors must sum to a value > 0 but was: " + sum); + } + theta0 = shardBalance / sum; + theta1 = indexBalance / sum; + theta2 = writeLoadBalance / sum; + theta3 = diskUsageBalance / sum; + } + + float weight(BalancedShardsAllocator.Balancer balancer, BalancedShardsAllocator.ModelNode node, String index) { + final float weightIndex = node.numShards(index) - balancer.avgShardsPerNode(index); + final float nodeWeight = nodeWeight( + node.numShards(), + balancer.avgShardsPerNode(), + node.writeLoad(), + balancer.avgWriteLoadPerNode(), + node.diskUsageInBytes(), + balancer.avgDiskUsageInBytesPerNode() + ); + return nodeWeight + theta1 * weightIndex; + } + + public float nodeWeight( + int nodeNumShards, + float avgShardsPerNode, + double nodeWriteLoad, + double avgWriteLoadPerNode, + double diskUsageInBytes, + double avgDiskUsageInBytesPerNode + ) { + final float weightShard = nodeNumShards - avgShardsPerNode; + final float ingestLoad = (float) (nodeWriteLoad - avgWriteLoadPerNode); + final float diskUsage = (float) (diskUsageInBytes - avgDiskUsageInBytesPerNode); + return theta0 * weightShard + theta2 * ingestLoad + theta3 * diskUsage; + } + + float minWeightDelta(float shardWriteLoad, float shardSizeBytes) { + return theta0 * 1 + theta1 * 1 + theta2 * shardWriteLoad + theta3 * shardSizeBytes; + } + + public static float avgShardPerNode(Metadata metadata, RoutingNodes routingNodes) { + return ((float) metadata.getTotalNumberOfShards()) / routingNodes.size(); + } + + public static double avgWriteLoadPerNode(WriteLoadForecaster writeLoadForecaster, Metadata metadata, RoutingNodes routingNodes) { + return getTotalWriteLoad(writeLoadForecaster, metadata) / routingNodes.size(); + } + + public static double avgDiskUsageInBytesPerNode(ClusterInfo clusterInfo, Metadata metadata, RoutingNodes routingNodes) { + return ((double) getTotalDiskUsageInBytes(clusterInfo, metadata) / routingNodes.size()); + } + + private static double getTotalWriteLoad(WriteLoadForecaster writeLoadForecaster, Metadata metadata) { + double writeLoad = 0.0; + for (IndexMetadata indexMetadata : metadata.indices().values()) { + writeLoad += getIndexWriteLoad(writeLoadForecaster, indexMetadata); + } + return writeLoad; + } + + private static double getIndexWriteLoad(WriteLoadForecaster writeLoadForecaster, IndexMetadata indexMetadata) { + var shardWriteLoad = writeLoadForecaster.getForecastedWriteLoad(indexMetadata).orElse(0.0); + return shardWriteLoad * numberOfCopies(indexMetadata); + } + + private static int numberOfCopies(IndexMetadata indexMetadata) { + return indexMetadata.getNumberOfShards() * (1 + indexMetadata.getNumberOfReplicas()); + } + + private static long getTotalDiskUsageInBytes(ClusterInfo clusterInfo, Metadata metadata) { + long totalDiskUsageInBytes = 0; + for (IndexMetadata indexMetadata : metadata.indices().values()) { + totalDiskUsageInBytes += getIndexDiskUsageInBytes(clusterInfo, indexMetadata); + } + return totalDiskUsageInBytes; + } + + // Visible for testing + static long getIndexDiskUsageInBytes(ClusterInfo
clusterInfo, IndexMetadata indexMetadata) { + if (indexMetadata.ignoreDiskWatermarks()) { + // disk watermarks are ignored for partial searchable snapshots + // and is equivalent to indexMetadata.isPartialSearchableSnapshot() + return 0; + } + final long forecastedShardSize = indexMetadata.getForecastedShardSizeInBytes().orElse(-1L); + long totalSizeInBytes = 0; + int shardCount = 0; + for (int shard = 0; shard < indexMetadata.getNumberOfShards(); shard++) { + final ShardId shardId = new ShardId(indexMetadata.getIndex(), shard); + final long primaryShardSize = Math.max(forecastedShardSize, clusterInfo.getShardSize(shardId, true, -1L)); + if (primaryShardSize != -1L) { + totalSizeInBytes += primaryShardSize; + shardCount++; + } + final long replicaShardSize = Math.max(forecastedShardSize, clusterInfo.getShardSize(shardId, false, -1L)); + if (replicaShardSize != -1L) { + totalSizeInBytes += replicaShardSize * indexMetadata.getNumberOfReplicas(); + shardCount += indexMetadata.getNumberOfReplicas(); + } + } + if (shardCount == numberOfCopies(indexMetadata)) { + return totalSizeInBytes; + } + return shardCount == 0 ? 0 : (totalSizeInBytes / shardCount) * numberOfCopies(indexMetadata); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsServiceTests.java index 0efa576a0cddc..35f1780464659 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationStatsServiceTests.java @@ -84,7 +84,7 @@ public void testShardStats() { clusterService, () -> clusterInfo, createShardAllocator(), - new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER) + new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER, ClusterSettings.createBuiltInClusterSettings()) ); assertThat( service.stats(), @@ -125,7 +125,7 @@ public void testRelocatingShardIsOnlyCountedOnceOnTargetNode() { clusterService, EmptyClusterInfoService.INSTANCE, createShardAllocator(), - new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER) + new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER, ClusterSettings.createBuiltInClusterSettings()) ); assertThat( service.stats(), @@ -182,7 +182,7 @@ public DesiredBalance getDesiredBalance() { ); } }, - new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER) + new NodeAllocationStatsProvider(TEST_WRITE_LOAD_FORECASTER, ClusterSettings.createBuiltInClusterSettings()) ); assertThat( service.stats(), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java index 98c3451329f52..412329e51a485 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java @@ -59,8 +59,8 @@ import static java.util.stream.Collectors.toSet; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; -import static org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Balancer.getIndexDiskUsageInBytes; import static 
org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.DISK_USAGE_BALANCE_FACTOR_SETTING; +import static org.elasticsearch.cluster.routing.allocation.allocator.WeightFunction.getIndexDiskUsageInBytes; import static org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider.SETTING_IGNORE_DISK_WATERMARKS; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java index a041efc9ad3f1..75cd6da44724d 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java @@ -19,12 +19,12 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingNodesHelper; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedShard; -import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsProvider; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; @@ -438,11 +438,13 @@ public void allocateUnassigned( } protected static final NodeAllocationStatsProvider EMPTY_NODE_ALLOCATION_STATS = new NodeAllocationStatsProvider( - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + createBuiltInClusterSettings() ) { @Override - public Map stats( - ClusterState clusterState, + public Map stats( + Metadata metadata, + RoutingNodes routingNodes, ClusterInfo clusterInfo, @Nullable DesiredBalance desiredBalance ) { From bfe1aad78044d7adc864ad647e88462f8cdce150 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 26 Nov 2024 16:47:25 +0100 Subject: [PATCH 256/386] Cleanup BucketsAggregator#rewriteBuckets (#114574) The array is initialized with the flag clearOnResize set to true so we don't need to set the values to 0 again. 
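For context, a minimal sketch of the redundancy being removed (assuming the documented BigArrays semantics, where clearOnResize=true yields zero-initialized storage; this mirrors the two lines touched in the diff below):

    // newLongArray(size, /* clearOnResize= */ true) hands back storage whose
    // slots already read as 0, so an explicit zeroing pass straight after
    // allocation does no observable work.
    LongArray docCounts = bigArrays().newLongArray(newNumBuckets, true);
    // docCounts.fill(0, newNumBuckets, 0); // redundant: the array starts cleared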
--- .../search/aggregations/bucket/BucketsAggregator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index ea667b821a7dd..665dd49e3381d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -105,7 +105,6 @@ public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap) try { docCounts = bigArrays().newLongArray(newNumBuckets, true); success = true; - docCounts.fill(0, newNumBuckets, 0); for (long i = 0; i < oldDocCounts.size(); i++) { long docCount = oldDocCounts.get(i); From 505c54eb94c71b694d44b8cf424be7ab5894e2e5 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 26 Nov 2024 16:59:54 +0100 Subject: [PATCH 257/386] Use feature flags in OperatorPrivilegesIT (#117491) Release runs fail for this suite because some of the actions listed are still behind a feature flag. Closes: https://github.com/elastic/elasticsearch/issues/102992 --- muted-tests.yml | 3 --- .../elasticsearch/xpack/security/operator/Constants.java | 8 +++++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 49898308e411b..1f092de410f8e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -226,9 +226,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceRestIT method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} issue: https://github.com/elastic/elasticsearch/issues/117349 -- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT - method: testEveryActionIsEitherOperatorOnlyOrNonOperator - issue: https://github.com/elastic/elasticsearch/issues/102992 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test reset running transform} issue: https://github.com/elastic/elasticsearch/issues/117473 diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index bfff63442281d..8df10037affdb 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.operator; +import org.elasticsearch.cluster.metadata.DataStream; + import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -508,9 +510,9 @@ public class Constants { "indices:admin/data_stream/lifecycle/get", "indices:admin/data_stream/lifecycle/put", "indices:admin/data_stream/lifecycle/explain", - "indices:admin/data_stream/options/delete", - "indices:admin/data_stream/options/get", - "indices:admin/data_stream/options/put", + DataStream.isFailureStoreFeatureFlagEnabled() ? "indices:admin/data_stream/options/delete" : null, + DataStream.isFailureStoreFeatureFlagEnabled() ? "indices:admin/data_stream/options/get" : null, + DataStream.isFailureStoreFeatureFlagEnabled() ? 
"indices:admin/data_stream/options/put" : null, "indices:admin/delete", "indices:admin/flush", "indices:admin/flush[s]", From f57c43cdf5ce8188cc66042b1a8adee420e91825 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 26 Nov 2024 08:09:30 -0800 Subject: [PATCH 258/386] Include a link to downsampling a TSDS using DSL document (#117510) --- docs/reference/data-streams/tsds.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index 461c0a1272e96..d0d6d4a455c63 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -339,4 +339,5 @@ include::tsds-index-settings.asciidoc[] include::downsampling.asciidoc[] include::downsampling-ilm.asciidoc[] include::downsampling-manual.asciidoc[] +include::downsampling-dsl.asciidoc[] include::tsds-reindex.asciidoc[] From b22d185b7fca8147ec1cfcd993d7c803ce5a240e Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 26 Nov 2024 17:46:40 +0100 Subject: [PATCH 259/386] ES|QL: fix stats by constant expresson with alias (#117551) --- docs/changelog/117551.yaml | 5 + .../src/main/resources/stats.csv-spec | 12 ++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/session/EsqlSession.java | 2 +- .../session/IndexResolverFieldNamesTests.java | 108 ++++++++++++++++++ 5 files changed, 132 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/117551.yaml diff --git a/docs/changelog/117551.yaml b/docs/changelog/117551.yaml new file mode 100644 index 0000000000000..081dd9203d82a --- /dev/null +++ b/docs/changelog/117551.yaml @@ -0,0 +1,5 @@ +pr: 117551 +summary: Fix stats by constant expresson with alias +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 5562028a5935f..f95506ff1982f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2778,6 +2778,18 @@ m:integer | y+1:integer 11 | 12 ; +statsByConstantExpressionWithAliasAndSort +required_capability: fix_stats_by_foldable_expression_2 +FROM employees +| EVAL y = "a" +| STATS count = COUNT() BY x = y +| SORT x +; + +count:long | x:keyword +100 | a +; + filterIsAlwaysTrue required_capability: per_agg_filtering FROM employees diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 08fa7f0a9b213..3eaeceaa86564 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -531,7 +531,12 @@ public enum Cap { /** * support for aggregations on semantic_text */ - SEMANTIC_TEXT_AGGREGATIONS(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG); + SEMANTIC_TEXT_AGGREGATIONS(EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG), + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/114714, again + */ + FIX_STATS_BY_FOLDABLE_EXPRESSION_2,; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 25bb6d80d0dd0..8f65914d1c30d 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -511,7 +511,7 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF // remove any already discovered UnresolvedAttributes that are in fact aliases defined later down in the tree // for example "from test | eval x = salary | stats max = max(x) by gender" // remove the UnresolvedAttribute "x", since that is an Alias defined in "eval" - AttributeSet planRefs = Expressions.references(p.expressions()); + AttributeSet planRefs = p.references(); p.forEachExpressionDown(Alias.class, alias -> { // do not remove the UnresolvedAttribute that has the same name as its alias, ie "rename id = id" // or the UnresolvedAttributes that are used in Functions that have aliases "STATS id = MAX(id)" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 5425f770c49e8..0fe89b24dfc6a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -353,6 +353,114 @@ public void testDocsStats() { | SORT languages""", Set.of("emp_no", "emp_no.*", "languages", "languages.*")); } + public void testEvalStats() { + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY y""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY y + | SORT y""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y + | SORT x""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | STATS count = COUNT(*) BY first_name + | SORT first_name""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y + | SORT x, first_name""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL first_name = "a" + | STATS count = COUNT(*) BY first_name + | SORT first_name""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY first_name = to_upper(y) + | SORT first_name""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = to_upper(first_name), z = "z" + | STATS count = COUNT(*) BY first_name = to_lower(y), z + | SORT first_name""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y, z = first_name + | SORT x, z""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y, first_name + | SORT x, first_name""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(first_name) BY x = y + | SORT x + | DROP first_name""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y + | MV_EXPAND x""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY first_name, y + | MV_EXPAND first_name""", Set.of("first_name", "first_name.*")); + + 
assertFieldNames(""" + FROM employees + | MV_EXPAND first_name + | EVAL y = "a" + | STATS count = COUNT(*) BY first_name, y + | SORT y""", Set.of("first_name", "first_name.*")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | MV_EXPAND y + | STATS count = COUNT(*) BY x = y + | SORT x""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY x = y + | STATS count = COUNT(count) by x + | SORT x""", Set.of("_index")); + + assertFieldNames(""" + FROM employees + | EVAL y = "a" + | STATS count = COUNT(*) BY first_name, y + | STATS count = COUNT(count) by x = y + | SORT x""", Set.of("first_name", "first_name.*")); + } + public void testSortWithLimitOne_DropHeight() { assertFieldNames("from employees | sort languages | limit 1 | drop height*", ALL_FIELDS); } From 1866299fa46e387238d28fe4e0d26c713926d47e Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Tue, 26 Nov 2024 12:23:19 -0500 Subject: [PATCH 260/386] Remove HTTP content copies (#117303) --- .../forbidden/es-server-signatures.txt | 2 - docs/changelog/117303.yaml | 5 +++ .../netty4/Netty4TrashingAllocatorIT.java | 2 +- .../system/indices/SystemIndicesQA.java | 7 +-- .../elasticsearch/action/ActionListener.java | 8 ++++ .../common/bytes/BytesReference.java | 23 ---------- .../org/elasticsearch/http/HttpTracer.java | 2 +- .../org/elasticsearch/rest/RestRequest.java | 43 +++++-------------- .../elasticsearch/rest/RestRequestFilter.java | 4 +- .../cluster/RestPutStoredScriptAction.java | 7 ++- .../rest/action/document/RestBulkAction.java | 2 +- .../rest/action/document/RestIndexAction.java | 2 +- .../action/ingest/RestPutPipelineAction.java | 14 ++++-- .../ingest/RestSimulateIngestAction.java | 3 +- .../ingest/RestSimulatePipelineAction.java | 10 +++-- .../action/search/RestMultiSearchAction.java | 6 +-- .../common/bytes/BytesArrayTests.java | 5 --- .../elasticsearch/rest/RestRequestTests.java | 4 +- .../EnterpriseSearchBaseRestHandler.java | 2 +- .../action/RestPostAnalyticsEventAction.java | 42 +++++++++--------- .../rules/action/RestPutQueryRuleAction.java | 2 +- .../action/RestPutQueryRulesetAction.java | 2 +- .../rest/RestPutInferenceModelAction.java | 13 +++--- .../rest/RestUpdateInferenceModelAction.java | 10 ++++- .../logstash/rest/RestPutPipelineAction.java | 2 +- .../xpack/ml/rest/job/RestPostDataAction.java | 10 ++++- .../rest/action/RestMonitoringBulkAction.java | 6 ++- .../xpack/security/audit/AuditUtil.java | 2 +- .../rest/action/SecurityBaseRestHandler.java | 2 +- .../action/user/RestHasPrivilegesAction.java | 4 +- .../rest/RestFindStructureAction.java | 16 +++---- .../rest/action/RestPutWatchAction.java | 24 +++++++---- 32 files changed, 141 insertions(+), 145 deletions(-) create mode 100644 docs/changelog/117303.yaml diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 68b97050ea012..a9da7995c2b36 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -167,5 +167,3 @@ org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#(java.lang.Str @defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug java.lang.Thread#(java.lang.Runnable) java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) - 
-org.elasticsearch.common.bytes.BytesReference#copyBytes(org.elasticsearch.common.bytes.BytesReference) @ This method is a subject for removal. Copying bytes is prone to performance regressions and unnecessary allocations. diff --git a/docs/changelog/117303.yaml b/docs/changelog/117303.yaml new file mode 100644 index 0000000000000..71d134f2cd077 --- /dev/null +++ b/docs/changelog/117303.yaml @@ -0,0 +1,5 @@ +pr: 117303 +summary: Remove HTTP content copies +area: Network +type: enhancement +issues: [] diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java index 18c91068ff4f9..f3a10ce228117 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java @@ -89,7 +89,7 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - var content = request.releasableContent(); + var content = request.content(); var iter = content.iterator(); return (chan) -> { request.getHttpRequest().release(); diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index 6e15e40efa69a..46c6d1b9228d6 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -10,6 +10,7 @@ package org.elasticsearch.system.indices; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.internal.node.NodeClient; @@ -177,12 +178,12 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + var content = request.requiredContent(); IndexRequest indexRequest = new IndexRequest(".net-new-system-index-primary"); - indexRequest.source(request.requiredContent(), request.getXContentType()); + indexRequest.source(content, request.getXContentType()); indexRequest.id(request.param("id")); indexRequest.setRefreshPolicy(request.param("refresh")); - - return channel -> client.index(indexRequest, new RestToXContentListener<>(channel)); + return channel -> client.index(indexRequest, ActionListener.withRef(new RestToXContentListener<>(channel), content)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index 890c3251e4f9a..a158669d936fe 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -475,4 +475,12 @@ static void runWithResource( ActionListener.run(ActionListener.runBefore(listener, resource::close), l -> action.accept(l, resource)); } + /** + * Increments ref count and returns a listener that will decrement ref count on listener completion. 
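+     * <p>A usage sketch of the pattern this patch applies across the REST handlers
+     * ({@code SomeAction} is a placeholder, not a real transport action):
+     * <pre>{@code
+     * var content = restRequest.requiredContent(); // ref-counted network buffer
+     * return channel -> client.execute(SomeAction.INSTANCE, request,
+     *     ActionListener.withRef(new RestToXContentListener<>(channel), content));
+     * // the extra reference keeps the buffer alive until the listener completes
+     * }</pre>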
+ */ + static ActionListener withRef(ActionListener listener, RefCounted ref) { + ref.mustIncRef(); + return releaseAfter(listener, ref::decRef); + } + } diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index 51e6512072e41..ddcfc1ea7eed8 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -74,29 +74,6 @@ static ByteBuffer[] toByteBuffers(BytesReference reference) { } } - /** - * Allocates new buffer and copy bytes from given BytesReference. - * - * @deprecated copying bytes is a right place for performance regression and unnecessary allocations. - * This method exists to serve very few places that struggle to handle reference counted buffers. - */ - @Deprecated(forRemoval = true) - static BytesReference copyBytes(BytesReference bytesReference) { - byte[] arr = new byte[bytesReference.length()]; - int offset = 0; - final BytesRefIterator iterator = bytesReference.iterator(); - try { - BytesRef slice; - while ((slice = iterator.next()) != null) { - System.arraycopy(slice.bytes, slice.offset, arr, offset, slice.length); - offset += slice.length; - } - return new BytesArray(arr); - } catch (IOException e) { - throw new AssertionError(e); - } - } - /** * Returns BytesReference composed of the provided ByteBuffers. */ diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index d6daf11c0539a..3d8360e6ee3fa 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -94,7 +94,7 @@ HttpTracer maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { private void logFullContent(RestRequest restRequest) { try (var stream = HttpBodyTracer.getBodyOutputStream(restRequest.getRequestId(), HttpBodyTracer.Type.REQUEST)) { - restRequest.releasableContent().writeTo(stream); + restRequest.content().writeTo(stream); } catch (Exception e2) { assert false : e2; // no real IO here } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 17d85a8eabb1c..a04bdcb32f2b4 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -23,7 +23,6 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpBody; @@ -303,22 +302,13 @@ public boolean isFullContent() { return httpRequest.body().isFull(); } - /** - * Returns a copy of HTTP content. The copy is GC-managed and does not require reference counting. - * Please use {@link #releasableContent()} to avoid content copy. - */ - @SuppressForbidden(reason = "temporarily support content copy while migrating RestHandlers to ref counted pooled buffers") - public BytesReference content() { - return BytesReference.copyBytes(releasableContent()); - } - /** * Returns a direct reference to the network buffer containing the request body. 
The HTTP layers will release their references to this * buffer as soon as they have finished the synchronous steps of processing the request on the network thread, which will by default * release the buffer back to the pool where it may be re-used for another request. If you need to keep the buffer alive past the end of * these synchronous steps, acquire your own reference to this buffer and release it once it's no longer needed. */ - public ReleasableBytesReference releasableContent() { + public ReleasableBytesReference content() { this.contentConsumed = true; var bytes = httpRequest.body().asFull().bytes(); if (bytes.hasReferences() == false) { @@ -338,32 +328,19 @@ public HttpBody.Stream contentStream() { return httpRequest.body().asStream(); } - private void ensureContent() { + /** + * Returns reference to the network buffer of HTTP content or throw an exception if the body or content type is missing. + * See {@link #content()}. + */ + public ReleasableBytesReference requiredContent() { if (hasContent() == false) { throw new ElasticsearchParseException("request body is required"); } else if (xContentType.get() == null) { throwValidationException("unknown content type"); } - } - - /** - * @return copy of the request body or throw an exception if the body or content type is missing. - * See {@link #content()}. Please use {@link #requiredReleasableContent()} to avoid content copy. - */ - public final BytesReference requiredContent() { - ensureContent(); return content(); } - /** - * Returns reference to the network buffer of HTTP content or throw an exception if the body or content type is missing. - * See {@link #releasableContent()}. It's a recommended method to handle HTTP content without copying it. - */ - public ReleasableBytesReference requiredReleasableContent() { - ensureContent(); - return releasableContent(); - } - private static void throwValidationException(String msg) { ValidationException unknownContentType = new ValidationException(); unknownContentType.addValidationError(msg); @@ -596,7 +573,7 @@ public final boolean hasContentOrSourceParam() { * if you need to handle the absence request content gracefully. 
*/ public final XContentParser contentOrSourceParamParser() throws IOException { - Tuple tuple = contentOrSourceParam(); + Tuple tuple = contentOrSourceParam(); return XContentHelper.createParserNotCompressed(parserConfig, tuple.v2(), tuple.v1().xContent().type()); } @@ -607,7 +584,7 @@ public final XContentParser contentOrSourceParamParser() throws IOException { */ public final void withContentOrSourceParamParserOrNull(CheckedConsumer withParser) throws IOException { if (hasContentOrSourceParam()) { - Tuple tuple = contentOrSourceParam(); + Tuple tuple = contentOrSourceParam(); try (XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, tuple.v2(), tuple.v1())) { withParser.accept(parser); } @@ -620,7 +597,7 @@ public final void withContentOrSourceParamParserOrNull(CheckedConsumer contentOrSourceParam() { + public final Tuple contentOrSourceParam() { if (hasContentOrSourceParam() == false) { throw new ElasticsearchParseException("request body or source parameter is required"); } else if (hasContent()) { @@ -636,7 +613,7 @@ public final Tuple contentOrSourceParam() { if (xContentType == null) { throwValidationException("Unknown value for source_content_type [" + typeParam + "]"); } - return new Tuple<>(xContentType, bytes); + return new Tuple<>(xContentType, ReleasableBytesReference.wrap(bytes)); } public ParsedMediaType getParsedAccept() { diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java index 57b4d2990c8e0..7c90d9168e6c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java @@ -45,10 +45,10 @@ public boolean hasContent() { } @Override - public ReleasableBytesReference releasableContent() { + public ReleasableBytesReference content() { if (filteredBytes == null) { Tuple> result = XContentHelper.convertToMap( - restRequest.requiredReleasableContent(), + restRequest.requiredContent(), true, restRequest.getXContentType() ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java index 4451117fa4792..a698dc3f30577 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.rest.action.admin.cluster; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -57,6 +58,10 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client request.getXContentType(), StoredScriptSource.parse(content, xContentType) ); - return channel -> client.execute(TransportPutStoredScriptAction.TYPE, putRequest, new RestToXContentListener<>(channel)); + return channel -> client.execute( + TransportPutStoredScriptAction.TYPE, + putRequest, + ActionListener.withRef(new RestToXContentListener<>(channel), content) + ); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 
9428ef5390b2f..dea7b7138d0d0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -103,7 +103,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - ReleasableBytesReference content = request.requiredReleasableContent(); + ReleasableBytesReference content = request.requiredContent(); try { bulkRequest.add( diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index d81ac03492d59..d40c6225cc7b4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -106,7 +106,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - ReleasableBytesReference source = request.requiredReleasableContent(); + ReleasableBytesReference source = request.requiredContent(); IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 269d9b08ab66b..c6b3daa38d663 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -9,10 +9,11 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -56,15 +57,20 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl } } - Tuple sourceTuple = restRequest.contentOrSourceParam(); + Tuple sourceTuple = restRequest.contentOrSourceParam(); + var content = sourceTuple.v2(); final var request = new PutPipelineRequest( getMasterNodeTimeout(restRequest), getAckTimeout(restRequest), restRequest.param("id"), - sourceTuple.v2(), + content, sourceTuple.v1(), ifVersion ); - return channel -> client.execute(PutPipelineTransportAction.TYPE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute( + PutPipelineTransportAction.TYPE, + request, + ActionListener.withRef(new RestToXContentListener<>(channel), content) + ); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java index c825a8198e6e4..978b6d1c3a92d 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.bulk.SimulateBulkRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; import org.elasticsearch.ingest.ConfigurationUtils; @@ -72,7 +73,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String defaultIndex = request.param("index"); FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); String defaultPipeline = request.param("pipeline"); - Tuple sourceTuple = request.contentOrSourceParam(); + Tuple sourceTuple = request.contentOrSourceParam(); Map sourceMap = XContentHelper.convertToMap(sourceTuple.v2(), false, sourceTuple.v1()).v2(); Map> pipelineSubstitutions = (Map>) sourceMap.remove( "pipeline_substitutions" diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index f85b89f774477..faf977b54885d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -9,9 +9,10 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -46,10 +47,13 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - Tuple sourceTuple = restRequest.contentOrSourceParam(); + Tuple sourceTuple = restRequest.contentOrSourceParam(); + var content = sourceTuple.v2(); SimulatePipelineRequest request = new SimulatePipelineRequest(sourceTuple.v2(), sourceTuple.v1(), restRequest.getRestApiVersion()); request.setId(restRequest.param("id")); request.setVerbose(restRequest.paramAsBoolean("verbose", false)); - return channel -> client.admin().cluster().simulatePipeline(request, new RestToXContentListener<>(channel)); + return channel -> client.admin() + .cluster() + .simulatePipeline(request, ActionListener.withRef(new RestToXContentListener<>(channel), content)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index aeb182978e1eb..89775b4ca8e15 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; -import org.elasticsearch.common.bytes.BytesReference; +import 
org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.features.NodeFeature; @@ -184,9 +184,9 @@ public static void parseMultiLineRequest( boolean ccsMinimizeRoundtrips = request.paramAsBoolean("ccs_minimize_roundtrips", true); String routing = request.param("routing"); - final Tuple sourceTuple = request.contentOrSourceParam(); + final Tuple sourceTuple = request.contentOrSourceParam(); final XContent xContent = sourceTuple.v1().xContent(); - final BytesReference data = sourceTuple.v2(); + final ReleasableBytesReference data = sourceTuple.v2(); MultiSearchRequest.readMultiLineFormat( xContent, request.contentParserConfig(), diff --git a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index 3fd8535cd5c27..e067be6b1b0da 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -108,9 +108,4 @@ public void testGetDoubleLE() { assertThat(e.getMessage(), equalTo("Index 9 out of bounds for length 9")); } - public void testCopyBytes() { - var data = randomByteArrayOfLength(between(1024, 1024 * 1024 * 50)); - var copy = BytesReference.copyBytes(new BytesArray(data)); - assertArrayEquals(data, BytesReference.toBytes(copy)); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java index 8a0ca5ba6c8a5..b391b77503400 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestRequestTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.http.HttpBody; import org.elasticsearch.http.HttpChannel; @@ -321,7 +321,7 @@ public String uri() { } @Override - public BytesReference content() { + public ReleasableBytesReference content() { return restRequest.content(); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java index 214f9150dfcc5..aa200f7ae9acb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java @@ -32,7 +32,7 @@ protected final BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest r // We need to consume parameters and content from the REST request in order to bypass unrecognized param errors // and return a license error. 
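+        // (the content() call below exists only to mark the request body as consumed)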
request.params().keySet().forEach(key -> request.param(key, "")); - request.releasableContent(); + request.content(); return channel -> channel.sendResponse( new RestResponse(channel, LicenseUtils.newComplianceException(this.licenseState, this.product)) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java index 34292c4669333..5706e5e384053 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java @@ -7,8 +7,9 @@ package org.elasticsearch.xpack.application.analytics.action; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.core.Tuple; import org.elasticsearch.license.XPackLicenseState; @@ -48,11 +49,26 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) { - PostAnalyticsEventAction.Request request = buidRequest(restRequest); + Tuple sourceTuple = restRequest.contentOrSourceParam(); + + var content = sourceTuple.v2(); + PostAnalyticsEventAction.RequestBuilder builder = PostAnalyticsEventAction.Request.builder( + restRequest.param("collection_name"), + restRequest.param("event_type"), + sourceTuple.v1(), + content + ); + + builder.debug(restRequest.paramAsBoolean("debug", false)); + + final Map> headers = restRequest.getHeaders(); + builder.headers(headers); + builder.clientAddress(getClientAddress(restRequest, headers)); + return channel -> client.execute( PostAnalyticsEventAction.INSTANCE, - request, - new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED) + builder.request(), + ActionListener.withRef(new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED), content) ); } @@ -71,22 +87,4 @@ private static InetAddress getClientAddress(RestRequest restRequest, Map sourceTuple = restRequest.contentOrSourceParam(); - - PostAnalyticsEventAction.RequestBuilder builder = PostAnalyticsEventAction.Request.builder( - restRequest.param("collection_name"), - restRequest.param("event_type"), - sourceTuple.v1(), - sourceTuple.v2() - ); - - builder.debug(restRequest.paramAsBoolean("debug", false)); - - final Map> headers = restRequest.getHeaders(); - builder.headers(headers); - builder.clientAddress(getClientAddress(restRequest, headers)); - - return builder.request(); - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java index 4addd97465bf2..1660502d77920 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRuleAction.java @@ -43,7 +43,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeC PutQueryRuleAction.Request request = new 
PutQueryRuleAction.Request( restRequest.param("ruleset_id"), restRequest.param("rule_id"), - restRequest.content(), + restRequest.requiredContent(), restRequest.getXContentType() ); return channel -> client.execute( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java index a43ac70327e77..db20e66845f35 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestPutQueryRulesetAction.java @@ -42,7 +42,7 @@ public List routes() { protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) throws IOException { PutQueryRulesetAction.Request request = new PutQueryRulesetAction.Request( restRequest.param("ruleset_id"), - restRequest.content(), + restRequest.requiredContent(), restRequest.getXContentType() ); return channel -> client.execute( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java index 0523160ee19c2..655e11996d522 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.rest; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; @@ -48,12 +49,12 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient taskType = TaskType.ANY; // task type must be defined in the body } - var request = new PutInferenceModelAction.Request( - taskType, - inferenceEntityId, - restRequest.requiredContent(), - restRequest.getXContentType() + var content = restRequest.requiredContent(); + var request = new PutInferenceModelAction.Request(taskType, inferenceEntityId, content, restRequest.getXContentType()); + return channel -> client.execute( + PutInferenceModelAction.INSTANCE, + request, + ActionListener.withRef(new RestToXContentListener<>(channel), content) ); - return channel -> client.execute(PutInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java index 9405a6752538c..120731a4f8e66 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestUpdateInferenceModelAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rest; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; @@ -50,13 +51,18 @@ 
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient throw new ElasticsearchStatusException("Inference ID must be provided in the path", RestStatus.BAD_REQUEST); } + var content = restRequest.requiredContent(); var request = new UpdateInferenceModelAction.Request( inferenceEntityId, - restRequest.requiredContent(), + content, restRequest.getXContentType(), taskType, RestUtils.getMasterNodeTimeout(restRequest) ); - return channel -> client.execute(UpdateInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute( + UpdateInferenceModelAction.INSTANCE, + request, + ActionListener.withRef(new RestToXContentListener<>(channel), content) + ); } } diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java index 2ea56b147bf9c..a9992e168bc66 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return restChannel -> { - final String content = request.releasableContent().utf8ToString(); + final String content = request.content().utf8ToString(); client.execute( PutPipelineAction.INSTANCE, new PutPipelineRequest(id, content, request.getXContentType()), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java index 48c6abde3010a..0fcad773100ff 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostDataAction.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.ml.rest.job; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; @@ -51,9 +52,14 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient PostDataAction.Request request = new PostDataAction.Request(restRequest.param(Job.ID.getPreferredName())); request.setResetStart(restRequest.param(PostDataAction.Request.RESET_START.getPreferredName(), DEFAULT_RESET_START)); request.setResetEnd(restRequest.param(PostDataAction.Request.RESET_END.getPreferredName(), DEFAULT_RESET_END)); - request.setContent(restRequest.content(), restRequest.getXContentType()); + var content = restRequest.content(); + request.setContent(content, restRequest.getXContentType()); - return channel -> client.execute(PostDataAction.INSTANCE, request, new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED)); + return channel -> client.execute( + PostDataAction.INSTANCE, + request, + ActionListener.withRef(new RestToXContentListener<>(channel, r -> RestStatus.ACCEPTED), content) + ); } @Override diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java index b69b958a27ce6..762cbffacb082 100644 --- 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.monitoring.rest.action; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; @@ -93,8 +94,9 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client final long intervalMillis = parseTimeValue(intervalAsString, INTERVAL).getMillis(); final MonitoringBulkRequestBuilder requestBuilder = new MonitoringBulkRequestBuilder(client); - requestBuilder.add(system, request.content(), request.getXContentType(), timestamp, intervalMillis); - return channel -> requestBuilder.execute(getRestBuilderListener(channel)); + var content = request.content(); + requestBuilder.add(system, content, request.getXContentType(), timestamp, intervalMillis); + return channel -> requestBuilder.execute(ActionListener.withRef(getRestBuilderListener(channel), content)); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java index 429b632cdac18..58516b1d8324d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java @@ -27,7 +27,7 @@ public class AuditUtil { public static String restRequestContent(RestRequest request) { if (request.hasContent()) { - var content = request.releasableContent(); + var content = request.content(); try { return XContentHelper.convertToJson(content, false, false, request.getXContentType()); } catch (IOException ioe) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index df21f5d4eeb0b..d5d11ea42e345 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -75,7 +75,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie return innerPrepareRequest(request, client); } else { request.params().keySet().forEach(key -> request.param(key, "")); - request.releasableContent(); // mark content consumed + request.content(); // mark content consumed return channel -> channel.sendResponse(new RestResponse(channel, failedFeature)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java index f2233a7e19fd0..8029ed3ba45e4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java @@ -8,7 +8,7 @@ import 
org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.license.XPackLicenseState; @@ -77,7 +77,7 @@ public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient c * Consume the body immediately. This ensures that if there is a body and we later reject the request (e.g., because security is not * enabled) that the REST infrastructure will not reject the request for not having consumed the body. */ - final Tuple content = request.contentOrSourceParam(); + final Tuple content = request.contentOrSourceParam(); final String username = getUsername(request); if (username == null) { return restChannel -> { throw new ElasticsearchSecurityException("there is no authenticated user"); }; diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java index 5078572dee5fd..f47a25409b821 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/rest/RestFindStructureAction.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.textstructure.rest; -import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; @@ -50,14 +50,14 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { FindStructureAction.Request request = new FindStructureAction.Request(); RestFindStructureArgumentsParser.parse(restRequest, request); + var content = restRequest.requiredContent(); + request.setSample(content); - if (restRequest.hasContent()) { - request.setSample(restRequest.content()); - } else { - throw new ElasticsearchParseException("request body is required"); - } - - return channel -> client.execute(FindStructureAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute( + FindStructureAction.INSTANCE, + request, + ActionListener.withRef(new RestToXContentListener<>(channel), content) + ); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java index 9dba72b1f64c3..0ed27a4073653 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestPutWatchAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.rest.action; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; @@ -42,19 +43,24 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest request, NodeClient 
client) { - PutWatchRequest putWatchRequest = new PutWatchRequest(request.param("id"), request.content(), request.getXContentType()); + var content = request.content(); + PutWatchRequest putWatchRequest = new PutWatchRequest(request.param("id"), content, request.getXContentType()); putWatchRequest.setVersion(request.paramAsLong("version", Versions.MATCH_ANY)); putWatchRequest.setIfSeqNo(request.paramAsLong("if_seq_no", putWatchRequest.getIfSeqNo())); putWatchRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", putWatchRequest.getIfPrimaryTerm())); putWatchRequest.setActive(request.paramAsBoolean("active", putWatchRequest.isActive())); - return channel -> client.execute(PutWatchAction.INSTANCE, putWatchRequest, new RestBuilderListener<>(channel) { - @Override - public RestResponse buildResponse(PutWatchResponse response, XContentBuilder builder) throws Exception { - response.toXContent(builder, request); - RestStatus status = response.isCreated() ? CREATED : OK; - return new RestResponse(status, builder); - } - }); + return channel -> client.execute( + PutWatchAction.INSTANCE, + putWatchRequest, + ActionListener.withRef(new RestBuilderListener<>(channel) { + @Override + public RestResponse buildResponse(PutWatchResponse response, XContentBuilder builder) throws Exception { + response.toXContent(builder, request); + RestStatus status = response.isCreated() ? CREATED : OK; + return new RestResponse(status, builder); + } + }, content) + ); } private static final Set FILTERED_FIELDS = Set.of( From f05c9b07f801e49e1a95f7665485464dcda862ee Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 26 Nov 2024 13:45:13 -0500 Subject: [PATCH 261/386] ESQL Add some tests for sorting the date nanos union type (#117567) --- .../src/main/resources/union_types.csv-spec | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index af987b13acc82..bf6e2f8ae0893 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -626,6 +626,65 @@ sample_data_ts_nanos | 2023-10-23T12:27:28.948123456Z | 172.21.2.113 | 27648 sample_data_ts_nanos | 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 ; +multiIndex sort millis and nanos as nanos +required_capability: to_date_nanos +required_capability: union_types +required_capability: metadata_fields +required_capability: union_types_remove_fields + +FROM sample_data, sample_data_ts_nanos METADATA _index +| EVAL ts = TO_DATE_NANOS(@timestamp) +| KEEP _index, ts, client_ip, event_duration, message +| SORT ts DESC +; + +_index:keyword | ts:date_nanos | client_ip:ip | event_duration:long | message:keyword +sample_data_ts_nanos | 2023-10-23T13:55:01.543123456Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 +sample_data | 2023-10-23T13:55:01.543000000Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 +sample_data_ts_nanos | 2023-10-23T13:53:55.832123456Z | 172.21.3.15 | 5033755 | Connection error +sample_data | 2023-10-23T13:53:55.832000000Z | 172.21.3.15 | 5033755 | Connection error +sample_data_ts_nanos | 2023-10-23T13:52:55.015123456Z | 172.21.3.15 | 8268153 | Connection error +sample_data | 2023-10-23T13:52:55.015000000Z | 172.21.3.15 | 8268153 | Connection error +sample_data_ts_nanos | 2023-10-23T13:51:54.732123456Z | 172.21.3.15 | 725448 | Connection error 
+sample_data | 2023-10-23T13:51:54.732000000Z | 172.21.3.15 | 725448 | Connection error +sample_data_ts_nanos | 2023-10-23T13:33:34.937123456Z | 172.21.0.5 | 1232382 | Disconnected +sample_data | 2023-10-23T13:33:34.937000000Z | 172.21.0.5 | 1232382 | Disconnected +sample_data_ts_nanos | 2023-10-23T12:27:28.948123456Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 +sample_data | 2023-10-23T12:27:28.948000000Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 +sample_data_ts_nanos | 2023-10-23T12:15:03.360123456Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 +sample_data | 2023-10-23T12:15:03.360000000Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 +; + +multiIndex sort millis and nanos as millis +required_capability: to_date_nanos +required_capability: union_types +required_capability: metadata_fields +required_capability: union_types_remove_fields + +FROM sample_data, sample_data_ts_nanos METADATA _index +| EVAL ts = TO_DATETIME(@timestamp) +| KEEP _index, ts, client_ip, event_duration, message +| SORT ts DESC, _index DESC +; + +_index:keyword | ts:datetime | client_ip:ip | event_duration:long | message:keyword +sample_data_ts_nanos | 2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 +sample_data | 2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 +sample_data_ts_nanos | 2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error +sample_data | 2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error +sample_data_ts_nanos | 2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error +sample_data | 2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error +sample_data_ts_nanos | 2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error +sample_data | 2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error +sample_data_ts_nanos | 2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected +sample_data | 2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected +sample_data_ts_nanos | 2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 +sample_data | 2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 +sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 +sample_data | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 +; + + multiIndexTsNanosRenameToNanosWithFiltering required_capability: to_date_nanos required_capability: date_nanos_binary_comparison From 094a81510c65e9ddd294137c369a716f707c1482 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 26 Nov 2024 20:05:26 +0000 Subject: [PATCH 262/386] Add `@UpdateForV9` annotations to `PutStoredScriptRequest` (#117582) We can remove some fields from `PutStoredScriptRequest` once the v9.0 transport protocol can deviate from the v8.last one. This commit adds reminder annotations to do this. 
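For illustration, the reminder pattern looks roughly like this (the owner value and the
field are simplified placeholders, not the exact contents of the diff below):

    @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA)
    // remove once v9 no longer has to read the v8.last wire format
    private final String legacyWireField;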
Relates #117566 --- .../storedscripts/PutStoredScriptRequest.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java index 8e453cd5bac3a..c3bdfc5a594c0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.script.StoredScriptSource; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -36,8 +37,20 @@ public class PutStoredScriptRequest extends AcknowledgedRequest Date: Tue, 26 Nov 2024 20:05:45 +0000 Subject: [PATCH 263/386] Add `@UpdateForV10` annotation to `allow_insecure_settings` (#117571) This hasn't really been necessary since reloadable secure settings landed in 7.0. It's been deprecated for a long time and the last known user has agreed to stop using it in v9. This commit adds a reminder to drop this functionality entirely in v10. --- .../java/org/elasticsearch/common/settings/SecureSetting.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 3d4f0d2d9dbf7..64fe57b3ea373 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.UpdateForV10; import java.io.InputStream; import java.security.GeneralSecurityException; @@ -26,6 +27,7 @@ public abstract class SecureSetting extends Setting { /** Determines whether legacy settings with sensitive values should be allowed. 
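+     * (Read from the {@code es.allow_insecure_settings} system property; effectively unused
+     * since reloadable secure settings arrived in 7.0.)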
*/ + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // this should no longer be in use, even in v9, so can go away in v10 private static final boolean ALLOW_INSECURE_SETTINGS = Booleans.parseBoolean(System.getProperty("es.allow_insecure_settings", "false")); private static final Set ALLOWED_PROPERTIES = EnumSet.of( From 2e9ef4059fd049f45e67325a1ebfb79ef2d78561 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 27 Nov 2024 08:28:36 +1100 Subject: [PATCH 264/386] Mute org.elasticsearch.reservedstate.service.FileSettingsServiceTests testStopWorksInMiddleOfProcessing #117591 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1f092de410f8e..a54520fa66adf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -243,6 +243,9 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set} issue: https://github.com/elastic/elasticsearch/issues/116777 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testStopWorksInMiddleOfProcessing + issue: https://github.com/elastic/elasticsearch/issues/117591 # Examples: # From 82be243b648f9fe61705f8caa31f931ad0c95d9c Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 26 Nov 2024 14:54:31 -0800 Subject: [PATCH 265/386] Refactor preview feature task to better support composite builds (#117594) --- .../src/main/groovy/elasticsearch.ide.gradle | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 9237c3ae8918c..895cca2af7967 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -142,13 +142,18 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { description = 'Enables preview features on native library module' dependsOn tasks.named("enableExternalConfiguration") - doLast { - ['main', 'test'].each { sourceSet -> - modifyXml(".idea/modules/libs/native/elasticsearch.libs.native.${sourceSet}.iml") { xml -> - xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW' + ext { + enablePreview = { moduleFile, languageLevel -> + modifyXml(moduleFile) { xml -> + xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = languageLevel } } } + + doLast { + enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.main.iml', 'JDK_21_PREVIEW') + enablePreview('.idea/modules/libs/native/elasticsearch.libs.native.test.iml', 'JDK_21_PREVIEW') + } } tasks.register('buildDependencyArtifacts') { From 433a00c0ee70ee285987f7ee9125be791bb22b86 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Tue, 26 Nov 2024 18:00:19 -0500 Subject: [PATCH 266/386] [ML] Fix for Deberta tokenizer when input sequence exceeds 512 tokens (#117595) * Add test and fix * Update docs/changelog/117595.yaml * Remove test which wasn't working --- docs/changelog/117595.yaml | 5 +++ .../nlp/tokenizers/NlpTokenizer.java | 23 ++++++++++++++ .../nlp/TextSimilarityProcessorTests.java | 31 +++++++++++++++++++ .../tokenizers/DebertaV2TokenizerTests.java | 4 +-- 4 files changed, 61 insertions(+), 2 deletions(-) create mode 100644 
docs/changelog/117595.yaml

diff --git a/docs/changelog/117595.yaml b/docs/changelog/117595.yaml
new file mode 100644
index 0000000000000..9360c372ac97e
--- /dev/null
+++ b/docs/changelog/117595.yaml
@@ -0,0 +1,5 @@
+pr: 117595
+summary: Fix for Deberta tokenizer when input sequence exceeds 512 tokens
+area: Machine Learning
+type: bug
+issues: []
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
index 0b4a5b651d8d4..930dbee304790 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
@@ -331,6 +331,29 @@ public List tokenize(String seq1, String seq2, Tokeni
                 tokenIdsSeq2 = tokenIdsSeq2.subList(0, maxSequenceLength() - extraTokens - tokenIdsSeq1.size());
                 tokenPositionMapSeq2 = tokenPositionMapSeq2.subList(0, maxSequenceLength() - extraTokens - tokenIdsSeq1.size());
             }
+            case BALANCED -> {
+                isTruncated = true;
+                // Split the token budget (max sequence length minus the special tokens added for a
+                // sequence pair) evenly; a sequence shorter than half the budget cedes its leftover
+                // space to the other. E.g. with a budget of 510: (600, 100) -> (410, 100) and
+                // (600, 600) -> (255, 255).
+                int firstSequenceLength = 0;
+
+                if (tokenIdsSeq2.size() > (maxSequenceLength() - getNumExtraTokensForSeqPair()) / 2) {
+                    firstSequenceLength = min(tokenIdsSeq1.size(), (maxSequenceLength() - getNumExtraTokensForSeqPair()) / 2);
+                } else {
+                    firstSequenceLength = min(
+                        tokenIdsSeq1.size(),
+                        maxSequenceLength() - tokenIdsSeq2.size() - getNumExtraTokensForSeqPair()
+                    );
+                }
+                // the second sequence takes whatever budget the first did not use
+                int secondSequenceLength = min(
+                    tokenIdsSeq2.size(),
+                    maxSequenceLength() - firstSequenceLength - getNumExtraTokensForSeqPair()
+                );
+
+                tokenIdsSeq1 = tokenIdsSeq1.subList(0, firstSequenceLength);
+                tokenPositionMapSeq1 = tokenPositionMapSeq1.subList(0, firstSequenceLength);
+
+                tokenIdsSeq2 = tokenIdsSeq2.subList(0, secondSequenceLength);
+                tokenPositionMapSeq2 = tokenPositionMapSeq2.subList(0, secondSequenceLength);
+            }
             case NONE -> throw ExceptionsHelper.badRequestException(
                 "Input too large. 
The tokenized input length [{}] exceeds the maximum sequence length [{}]", numTokens, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessorTests.java index 3590793b81abd..7460e17055a00 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessorTests.java @@ -10,11 +10,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.DebertaV2Tokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.DebertaV2Tokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchInferenceResult; @@ -22,6 +24,8 @@ import java.util.List; import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizerTests.TEST_CASED_VOCAB; +import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.DebertaV2TokenizerTests.TEST_CASE_SCORES; +import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.DebertaV2TokenizerTests.TEST_CASE_VOCAB; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -62,6 +66,33 @@ public void testProcessor() throws IOException { assertThat(result.predictedValue(), closeTo(42, 1e-6)); } + public void testBalancedTruncationWithLongInput() throws IOException { + String question = "Is Elasticsearch scalable?"; + StringBuilder longInputBuilder = new StringBuilder(); + for (int i = 0; i < 1000; i++) { + longInputBuilder.append(TEST_CASE_VOCAB.get(randomIntBetween(0, TEST_CASE_VOCAB.size() - 1))).append(i).append(" "); + } + String longInput = longInputBuilder.toString().trim(); + + DebertaV2Tokenization tokenization = new DebertaV2Tokenization(false, true, null, Tokenization.Truncate.BALANCED, -1); + DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder(TEST_CASE_VOCAB, TEST_CASE_SCORES, tokenization).build(); + TextSimilarityConfig textSimilarityConfig = new TextSimilarityConfig( + question, + new VocabularyConfig(""), + tokenization, + "result", + TextSimilarityConfig.SpanScoreFunction.MAX + ); + TextSimilarityProcessor processor = new TextSimilarityProcessor(tokenizer); + TokenizationResult tokenizationResult = processor.getRequestBuilder(textSimilarityConfig) + .buildRequest(List.of(longInput), "1", Tokenization.Truncate.BALANCED, -1, null) + .tokenization(); + + // Assert that the tokenization result is as expected + assertThat(tokenizationResult.anyTruncated(), is(true)); + assertThat(tokenizationResult.getTokenization(0).tokenIds().length, equalTo(512)); + } + public void testResultFunctions() { BertTokenization tokenization = new BertTokenization(false, 
true, 384, Tokenization.Truncate.NONE, 128); BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, tokenization).build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java index a8461de8630ae..fc070ec25dc68 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java @@ -23,7 +23,7 @@ public class DebertaV2TokenizerTests extends ESTestCase { - private static final List TEST_CASE_VOCAB = List.of( + public static final List TEST_CASE_VOCAB = List.of( DebertaV2Tokenizer.CLASS_TOKEN, DebertaV2Tokenizer.PAD_TOKEN, DebertaV2Tokenizer.SEPARATOR_TOKEN, @@ -48,7 +48,7 @@ public class DebertaV2TokenizerTests extends ESTestCase { "<0xAD>", "▁" ); - private static final List TEST_CASE_SCORES = List.of( + public static final List TEST_CASE_SCORES = List.of( 0.0, 0.0, 0.0, From edd9d96fdf7141840a6051ec99883e4769a13b29 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 27 Nov 2024 11:08:13 +1100 Subject: [PATCH 267/386] Add a blank line between java and javax imports (#117602) This PR updates java and javax imports layout in editconfig to be consistent with spotless --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index cf4f74744d2b4..774fd201ef8d5 100644 --- a/.editorconfig +++ b/.editorconfig @@ -209,7 +209,7 @@ indent_size = 4 max_line_length = 140 ij_java_class_count_to_use_import_on_demand = 999 ij_java_names_count_to_use_import_on_demand = 999 -ij_java_imports_layout = *,|,com.**,|,org.**,|,java.**,javax.**,|,$* +ij_java_imports_layout = *,|,com.**,|,org.**,|,java.**,|,javax.**,|,$* [*.json] indent_size = 2 From c5d155ec2b7f60ca68a75be784e1eae90e5ddf2f Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 26 Nov 2024 16:17:40 -0800 Subject: [PATCH 268/386] Increase test cluster node startup timeout (#117603) --- .../elasticsearch/gradle/testclusters/ElasticsearchNode.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 90162591cfcef..4cb67e249b0b0 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -98,7 +98,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { private static final int ES_DESTROY_TIMEOUT = 20; private static final TimeUnit ES_DESTROY_TIMEOUT_UNIT = TimeUnit.SECONDS; - private static final int NODE_UP_TIMEOUT = 2; + private static final int NODE_UP_TIMEOUT = 3; private static final TimeUnit NODE_UP_TIMEOUT_UNIT = TimeUnit.MINUTES; private static final int ADDITIONAL_CONFIG_TIMEOUT = 15; private static final TimeUnit ADDITIONAL_CONFIG_TIMEOUT_UNIT = TimeUnit.SECONDS; From e7a9dcb180f9f12ccdf876eaa427b86ca873715d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 27 Nov 2024 16:48:12 +1100 Subject: [PATCH 269/386] Mute org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT 
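Taken out of the switch, the arithmetic of the new BALANCED branch is easy to check by hand. A minimal standalone sketch of the same split, where maxSeq and extra stand in for maxSequenceLength() and getNumExtraTokensForSeqPair() (hypothetical helper, not part of the patch):

    // Hypothetical sketch of BALANCED truncation: each sequence gets at most half of the
    // budget left after the special tokens, and any slack from a short second sequence
    // is handed to the first one.
    static int[] balancedSplit(int seq1Len, int seq2Len, int maxSeq, int extra) {
        int budget = maxSeq - extra;
        int first;
        if (seq2Len > budget / 2) {
            first = Math.min(seq1Len, budget / 2);
        } else {
            first = Math.min(seq1Len, budget - seq2Len);
        }
        int second = Math.min(seq2Len, budget - first);
        return new int[] { first, second };
    }

For example, with maxSeq = 512 and extra = 2, two 1000-token sequences are each cut to 255 tokens (255 + 255 + 2 = 512, consistent with the 512-token cap asserted in testBalancedTruncationWithLongInput); if the second sequence is only 100 tokens, the first keeps 410.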
From edd9d96fdf7141840a6051ec99883e4769a13b29 Mon Sep 17 00:00:00 2001
From: Yang Wang
Date: Wed, 27 Nov 2024 11:08:13 +1100
Subject: [PATCH 267/386] Add a blank line between java and javax imports
 (#117602)

This PR updates the java and javax imports layout in .editorconfig to be
consistent with spotless

---
 .editorconfig | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.editorconfig b/.editorconfig
index cf4f74744d2b4..774fd201ef8d5 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -209,7 +209,7 @@ indent_size = 4
 max_line_length = 140
 ij_java_class_count_to_use_import_on_demand = 999
 ij_java_names_count_to_use_import_on_demand = 999
-ij_java_imports_layout = *,|,com.**,|,org.**,|,java.**,javax.**,|,$*
+ij_java_imports_layout = *,|,com.**,|,org.**,|,java.**,|,javax.**,|,$*

 [*.json]
 indent_size = 2
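In practice the added | separator just means IntelliJ now leaves a blank line between the java.** and javax.** groups, matching what spotless already enforces. For illustration only (the class names below are arbitrary examples, not taken from the patch), an import block under the new layout:

    import com.fasterxml.jackson.databind.ObjectMapper;

    import org.elasticsearch.common.settings.Settings;

    import java.util.List;

    import javax.net.ssl.SSLContext;   // previously grouped directly under the java.** imports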
From c5d155ec2b7f60ca68a75be784e1eae90e5ddf2f Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Tue, 26 Nov 2024 16:17:40 -0800
Subject: [PATCH 268/386] Increase test cluster node startup timeout (#117603)

---
 .../elasticsearch/gradle/testclusters/ElasticsearchNode.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index 90162591cfcef..4cb67e249b0b0 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -98,7 +98,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {

     private static final int ES_DESTROY_TIMEOUT = 20;
     private static final TimeUnit ES_DESTROY_TIMEOUT_UNIT = TimeUnit.SECONDS;
-    private static final int NODE_UP_TIMEOUT = 2;
+    private static final int NODE_UP_TIMEOUT = 3;
     private static final TimeUnit NODE_UP_TIMEOUT_UNIT = TimeUnit.MINUTES;
     private static final int ADDITIONAL_CONFIG_TIMEOUT = 15;
     private static final TimeUnit ADDITIONAL_CONFIG_TIMEOUT_UNIT = TimeUnit.SECONDS;

From e7a9dcb180f9f12ccdf876eaa427b86ca873715d Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 27 Nov 2024 16:48:12 +1100
Subject: [PATCH 269/386] Mute
 org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT
 org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT #117596

---
 muted-tests.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index a54520fa66adf..c97e46375c597 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -246,6 +246,8 @@ tests:
 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
   method: testStopWorksInMiddleOfProcessing
   issue: https://github.com/elastic/elasticsearch/issues/117591
+- class: org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT
+  issue: https://github.com/elastic/elasticsearch/issues/117596

 # Examples:
 #

From 1988bf10880749cef8a3d554c098eea4d8e4870b Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Wed, 27 Nov 2024 07:38:33 +0100
Subject: [PATCH 270/386] Add has_custom_cutoff_date to logsdb usage. (#117550)

Indicates whether
es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override
system property has been configured. A follow up from #116647

---
 .../org/elasticsearch/TransportVersions.java  |  2 +
 .../application/LogsDBFeatureSetUsage.java    | 23 ++++++++--
 .../logsdb/qa/with-custom-cutoff/build.gradle | 19 ++++++++
 .../xpack/logsdb/LogsdbWithBasicRestIT.java   | 45 +++++++++++++++++++
 .../logsdb/LogsDBUsageTransportAction.java    |  8 +++-
 .../logsdb/SyntheticSourceLicenseService.java |  5 +--
 6 files changed, 94 insertions(+), 8 deletions(-)
 create mode 100644 x-pack/plugin/logsdb/qa/with-custom-cutoff/build.gradle
 create mode 100644 x-pack/plugin/logsdb/qa/with-custom-cutoff/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java

diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 6567f48d6c232..dda7d7e5d4c4c 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -204,9 +204,11 @@ static TransportVersion def(int id) {
     public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0);
     public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0);
     public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0);
+    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_00_1);
     public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0);
     public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0);
     public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0);
+    public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0);

     /*
      * STOP! READ THIS FIRST! No, really,
      * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java
index 2758ef73a98da..b32e95c5fc9d8 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java
@@ -22,6 +22,7 @@ public final class LogsDBFeatureSetUsage extends XPackFeatureUsage {
     private final int indicesWithSyntheticSource;
     private final long numDocs;
     private final long sizeInBytes;
+    private final boolean hasCustomCutoffDate;

     public LogsDBFeatureSetUsage(StreamInput input) throws IOException {
         super(input);
@@ -34,6 +35,13 @@ public LogsDBFeatureSetUsage(StreamInput input) throws IOException {
             numDocs = 0;
             sizeInBytes = 0;
         }
+        var transportVersion = input.getTransportVersion();
+        if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17)
+            || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) {
+            hasCustomCutoffDate = input.readBoolean();
+        } else {
+            hasCustomCutoffDate = false;
+        }
     }

     @Override
@@ -45,6 +53,11 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeVLong(numDocs);
             out.writeVLong(sizeInBytes);
         }
+        var transportVersion = out.getTransportVersion();
+        if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17)
+            || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) {
+            out.writeBoolean(hasCustomCutoffDate);
+        }
     }

     public LogsDBFeatureSetUsage(
@@ -53,13 +66,15 @@ public LogsDBFeatureSetUsage(
         int indicesCount,
         int indicesWithSyntheticSource,
         long numDocs,
-        long sizeInBytes
+        long sizeInBytes,
+        boolean hasCustomCutoffDate
     ) {
         super(XPackField.LOGSDB, available, enabled);
         this.indicesCount = indicesCount;
         this.indicesWithSyntheticSource = indicesWithSyntheticSource;
         this.numDocs = numDocs;
         this.sizeInBytes = sizeInBytes;
+        this.hasCustomCutoffDate = hasCustomCutoffDate;
     }

     @Override
@@ -74,11 +89,12 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx
         builder.field("indices_with_synthetic_source", indicesWithSyntheticSource);
         builder.field("num_docs", numDocs);
         builder.field("size_in_bytes", sizeInBytes);
+        builder.field("has_custom_cutoff_date", hasCustomCutoffDate);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(available, enabled, indicesCount, indicesWithSyntheticSource, numDocs, sizeInBytes);
+        return Objects.hash(available, enabled, indicesCount, indicesWithSyntheticSource, numDocs, sizeInBytes, hasCustomCutoffDate);
     }

     @Override
@@ -95,6 +111,7 @@ public boolean equals(Object obj) {
             && Objects.equals(indicesCount, other.indicesCount)
             && Objects.equals(indicesWithSyntheticSource, other.indicesWithSyntheticSource)
             && Objects.equals(numDocs, other.numDocs)
-            && Objects.equals(sizeInBytes, other.sizeInBytes);
+            && Objects.equals(sizeInBytes, other.sizeInBytes)
+            && Objects.equals(hasCustomCutoffDate, other.hasCustomCutoffDate);
     }
 }

diff --git a/x-pack/plugin/logsdb/qa/with-custom-cutoff/build.gradle b/x-pack/plugin/logsdb/qa/with-custom-cutoff/build.gradle
new file mode 100644
index 0000000000000..9729ac9c29cef
--- /dev/null
+++ b/x-pack/plugin/logsdb/qa/with-custom-cutoff/build.gradle
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+apply plugin: 'elasticsearch.internal-java-rest-test'
+
+dependencies {
+  javaRestTestImplementation(testArtifact(project(xpackModule('core'))))
+}
+
+tasks.named("javaRestTest").configure {
+  // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC
+  buildParams.withFipsEnabledOnly(it)
+
+  usesDefaultDistribution()
+}

diff --git a/x-pack/plugin/logsdb/qa/with-custom-cutoff/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java b/x-pack/plugin/logsdb/qa/with-custom-cutoff/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java
new file mode 100644
index 0000000000000..3266e2e6e4757
--- /dev/null
+++ b/x-pack/plugin/logsdb/qa/with-custom-cutoff/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.logsdb;
+
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.test.cluster.local.distribution.DistributionType;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.hamcrest.Matchers;
+import org.junit.ClassRule;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class LogsdbWithBasicRestIT extends ESRestTestCase {
+
+    @ClassRule
+    public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
+        .distribution(DistributionType.DEFAULT)
+        .systemProperty("es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override", "2027-12-31T23:59")
+        .setting("xpack.security.enabled", "false")
+        .setting("cluster.logsdb.enabled", "true")
+        .build();
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
+    public void testCustomCutoffDateUsage() throws IOException {
+        var response = getAsMap("/_xpack/usage");
+        Map usage = (Map) response.get("logsdb");
+        assertThat(usage, Matchers.hasEntry("available", true));
+        assertThat(usage, Matchers.hasEntry("enabled", true));
+        assertThat(usage, Matchers.hasEntry("indices_count", 0));
+        assertThat(usage, Matchers.hasEntry("indices_with_synthetic_source", 0));
+        assertThat(usage, Matchers.hasEntry("num_docs", 0));
+        assertThat(usage, Matchers.hasEntry("size_in_bytes", 0));
+        assertThat(usage, Matchers.hasEntry("has_custom_cutoff_date", true));
+    }
+}

diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
index 62e1eef3e0e97..f4fa2a29d79a0 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBUsageTransportAction.java
@@ -77,6 +77,7 @@ protected void masterOperation(
             }
         }
         final boolean enabled = LogsDBPlugin.CLUSTER_LOGSDB_ENABLED.get(clusterService.getSettings());
+        final boolean hasCustomCutoffDate = System.getProperty(SyntheticSourceLicenseService.CUTOFF_DATE_SYS_PROP_NAME) != null;
         if (featureService.clusterHasFeature(state, XPackFeatures.LOGSDB_TELMETRY_STATS)) {
             final DiscoveryNode[] nodes = state.nodes().getDataNodes().values().toArray(DiscoveryNode[]::new);
             final var statsRequest = new IndexModeStatsActionType.StatsRequest(nodes);
@@ -91,13 +92,16 @@ protected void masterOperation(
                         finalNumIndices,
                         finalNumIndicesWithSyntheticSources,
                         indexStats.numDocs(),
-                        indexStats.numBytes()
+                        indexStats.numBytes(),
+                        hasCustomCutoffDate
                     )
                 );
             }));
         } else {
             listener.onResponse(
-                new XPackUsageFeatureResponse(new LogsDBFeatureSetUsage(true, enabled, numIndices, numIndicesWithSyntheticSources, 0L, 0L))
+                new XPackUsageFeatureResponse(
+                    new LogsDBFeatureSetUsage(true, enabled, numIndices, numIndicesWithSyntheticSources, 0L, 0L, hasCustomCutoffDate)
+                )
             );
         }
     }

diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
index 1b3513f15a86a..71de2f7909835 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
@@ -27,8 +27,7 @@ final class SyntheticSourceLicenseService {
     static final String MAPPINGS_FEATURE_FAMILY = "mappings";

     // You can only override this property if you received explicit approval from Elastic.
-    private static final String CUTOFF_DATE_SYS_PROP_NAME =
-        "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override";
+    static final String CUTOFF_DATE_SYS_PROP_NAME = "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override";
     private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceLicenseService.class);
     static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();

@@ -129,7 +128,7 @@ private static long getCutoffDate(String cutoffDateAsString) {
             LOGGER.info(
                 "Configuring [{}] to [{}]",
                 CUTOFF_DATE_SYS_PROP_NAME,
-                LocalDateTime.ofInstant(Instant.ofEpochSecond(cutoffDate), ZoneOffset.UTC)
+                LocalDateTime.ofInstant(Instant.ofEpochMilli(cutoffDate), ZoneOffset.UTC)
             );
             return cutoffDate;
         } else {
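The ofEpochSecond -> ofEpochMilli change in that last hunk is a genuine bug fix rather than a rename: the cutoff values in this class are epoch milliseconds (see DEFAULT_CUTOFF_DATE above), so logging them via Instant.ofEpochSecond misplaced the date by a factor of 1000. A small self-contained demonstration (hypothetical class; the value mirrors the default cutoff in the patch):

    import java.time.Instant;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;

    class CutoffDateLogDemo {
        public static void main(String[] args) {
            // The cutoff is stored as epoch *milliseconds*:
            long cutoff = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
            // Old call: the millisecond value is read as seconds, landing roughly 55,000 years in the future.
            System.out.println(LocalDateTime.ofInstant(Instant.ofEpochSecond(cutoff), ZoneOffset.UTC));
            // Fixed call: prints the intended 2024-12-12T00:00.
            System.out.println(LocalDateTime.ofInstant(Instant.ofEpochMilli(cutoff), ZoneOffset.UTC));
        }
    }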
WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:32f06b169bb4b0f257fbb10e8c8379f06d3ee1355c89b3327cb623781a29590e", "-wolfi", "apk" ), From 6130fbb0ea012b29f94a62df1d39abfcda247555 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 27 Nov 2024 08:17:42 +0000 Subject: [PATCH 272/386] Implement lifecycle on `SimulatePipelineRequest` (#117585) Rather than releasing the REST request body after computing the response, we can link the lifecycles of the REST and transport requests and release the REST request body sooner. Not that we expect these bodies to be particularly large in this case, but still it's a better pattern to follow. --- .../ingest/geoip/GeoIpDownloaderIT.java | 4 +-- .../elasticsearch/ingest/IngestClientIT.java | 3 +- .../ingest/SimulatePipelineRequest.java | 31 ++++++++++++++++--- .../SimulatePipelineRequestBuilder.java | 3 +- .../ingest/RestSimulatePipelineAction.java | 8 ++--- .../ingest/SimulatePipelineRequestTests.java | 9 ++---- .../ingest/IngestPipelineTestUtils.java | 16 ++++++++++ .../xpack/enrich/EnrichProcessorIT.java | 11 +++---- .../license/MachineLearningLicensingIT.java | 18 +++-------- .../TransportPreviewTransformAction.java | 6 +++- 10 files changed, 68 insertions(+), 41 deletions(-) diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index f8c8d2bd359f3..dd177fed5732a 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -41,7 +41,6 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; @@ -67,6 +66,7 @@ import java.util.zip.GZIPInputStream; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDefaultDatabases; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; @@ -494,7 +494,7 @@ private SimulateDocumentBaseResult simulatePipeline() throws IOException { builder.endObject(); bytes = BytesReference.bytes(builder); } - SimulatePipelineRequest simulateRequest = new SimulatePipelineRequest(bytes, XContentType.JSON); + SimulatePipelineRequest simulateRequest = jsonSimulatePipelineRequest(bytes); simulateRequest.setId("_id"); // Avoid executing on a coordinating only node, because databases are not available there and geoip processor won't do any lookups. 
// (some test seeds repeatedly hit such nodes causing failures) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java index c25ce822f8755..81a39dbe1f9f7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; import static org.elasticsearch.ingest.IngestPipelineTestUtils.putJsonPipelineRequest; import static org.elasticsearch.test.NodeRoles.nonIngestNode; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -97,7 +98,7 @@ public void testSimulate() throws Exception { if (randomBoolean()) { response = clusterAdmin().prepareSimulatePipeline(bytes, XContentType.JSON).setId("_id").get(); } else { - SimulatePipelineRequest request = new SimulatePipelineRequest(bytes, XContentType.JSON); + SimulatePipelineRequest request = jsonSimulatePipelineRequest(bytes); request.setId("_id"); response = clusterAdmin().simulatePipeline(request).get(); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 9cfc441490859..d6a2d81fdb7d3 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; @@ -41,19 +42,20 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(SimulatePipelineRequest.class); private String id; private boolean verbose; - private final BytesReference source; + private final ReleasableBytesReference source; private final XContentType xContentType; private RestApiVersion restApiVersion; /** * Creates a new request with the given source and its content type */ - public SimulatePipelineRequest(BytesReference source, XContentType xContentType) { + public SimulatePipelineRequest(ReleasableBytesReference source, XContentType xContentType) { this(source, xContentType, RestApiVersion.current()); } - public SimulatePipelineRequest(BytesReference source, XContentType xContentType, RestApiVersion restApiVersion) { + public SimulatePipelineRequest(ReleasableBytesReference source, XContentType xContentType, RestApiVersion restApiVersion) { this.source = Objects.requireNonNull(source); + assert source.hasReferences(); this.xContentType = Objects.requireNonNull(xContentType); this.restApiVersion = restApiVersion; } @@ -62,7 +64,7 @@ public SimulatePipelineRequest(BytesReference source, XContentType xContentType, super(in); id = in.readOptionalString(); verbose = in.readBoolean(); - source = in.readBytesReference(); + source = in.readReleasableBytesReference(); xContentType = in.readEnum(XContentType.class); } @@ 
-88,6 +90,7 @@ public void setVerbose(boolean verbose) { } public BytesReference getSource() { + assert source.hasReferences(); return source; } @@ -250,4 +253,24 @@ private static List parseDocs(Map config, RestAp public RestApiVersion getRestApiVersion() { return restApiVersion; } + + @Override + public final void incRef() { + source.incRef(); + } + + @Override + public final boolean tryIncRef() { + return source.tryIncRef(); + } + + @Override + public final boolean decRef() { + return source.decRef(); + } + + @Override + public final boolean hasReferences() { + return source.hasReferences(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java index 05e30685c6a9b..931b86d15e24b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.xcontent.XContentType; public class SimulatePipelineRequestBuilder extends ActionRequestBuilder { @@ -20,7 +21,7 @@ public class SimulatePipelineRequestBuilder extends ActionRequestBuilder sourceTuple = restRequest.contentOrSourceParam(); - var content = sourceTuple.v2(); - SimulatePipelineRequest request = new SimulatePipelineRequest(sourceTuple.v2(), sourceTuple.v1(), restRequest.getRestApiVersion()); + final var request = new SimulatePipelineRequest(sourceTuple.v2(), sourceTuple.v1(), restRequest.getRestApiVersion()); request.setId(restRequest.param("id")); request.setVerbose(restRequest.paramAsBoolean("verbose", false)); - return channel -> client.admin() - .cluster() - .simulatePipeline(request, ActionListener.withRef(new RestToXContentListener<>(channel), content)); + return channel -> client.admin().cluster().simulatePipeline(request, new RestToXContentListener<>(channel)); } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java index 58ff9ec421889..983c2e7d65032 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java @@ -16,14 +16,14 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.nio.charset.StandardCharsets; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; import static org.hamcrest.CoreMatchers.equalTo; public class SimulatePipelineRequestTests extends ESTestCase { public void testSerialization() throws IOException { - SimulatePipelineRequest request = new SimulatePipelineRequest(new BytesArray(""), XContentType.JSON); + SimulatePipelineRequest request = jsonSimulatePipelineRequest(new BytesArray("")); // Sometimes we set an id if (randomBoolean()) { request.setId(randomAlphaOfLengthBetween(1, 10)); @@ -44,10 +44,7 @@ public void testSerialization() throws IOException { } public void testSerializationWithXContent() throws IOException { - SimulatePipelineRequest request = new SimulatePipelineRequest( - new 
BytesArray("{}".getBytes(StandardCharsets.UTF_8)), - XContentType.JSON - ); + SimulatePipelineRequest request = jsonSimulatePipelineRequest("{}"); assertEquals(XContentType.JSON, request.getXContentType()); BytesStreamOutput output = new BytesStreamOutput(); diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java index 8fd3c61d4c9da..9888b1eb661ff 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java @@ -14,11 +14,13 @@ import org.elasticsearch.action.ingest.DeletePipelineTransportAction; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.PutPipelineTransportAction; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -124,4 +126,18 @@ public void onFailure(Exception e) { ); } } + + /** + * Construct a new {@link SimulatePipelineRequest} whose content is the given JSON document, represented as a {@link String}. + */ + public static SimulatePipelineRequest jsonSimulatePipelineRequest(String jsonString) { + return jsonSimulatePipelineRequest(new BytesArray(jsonString)); + } + + /** + * Construct a new {@link SimulatePipelineRequest} whose content is the given JSON document, represented as a {@link BytesReference}. 
+ */ + public static SimulatePipelineRequest jsonSimulatePipelineRequest(BytesReference jsonBytes) { + return new SimulatePipelineRequest(ReleasableBytesReference.wrap(jsonBytes), XContentType.JSON); + } } diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java index d646aed11d7d9..5fc16034465d4 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java @@ -9,9 +9,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; @@ -27,6 +25,7 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -90,7 +89,7 @@ public void testEnrichCacheValuesCannotBeCorrupted() { var executePolicyRequest = new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName); client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); - var simulatePipelineRequest = new SimulatePipelineRequest(new BytesArray(""" + var simulatePipelineRequest = jsonSimulatePipelineRequest(""" { "pipeline": { "processors": [ @@ -119,7 +118,7 @@ public void testEnrichCacheValuesCannotBeCorrupted() { } ] } - """), XContentType.JSON); + """); var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); var result = (SimulateDocumentBaseResult) response.getResults().get(0); assertThat(result.getFailure(), nullValue()); @@ -132,7 +131,7 @@ public void testEnrichCacheValuesCannotBeCorrupted() { assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(1L)); assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(0L)); - simulatePipelineRequest = new SimulatePipelineRequest(new BytesArray(""" + simulatePipelineRequest = jsonSimulatePipelineRequest(""" { "pipeline": { "processors": [ @@ -155,7 +154,7 @@ public void testEnrichCacheValuesCannotBeCorrupted() { } ] } - """), XContentType.JSON); + """); response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); result = (SimulateDocumentBaseResult) response.getResults().get(0); assertThat(result.getFailure(), nullValue()); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java index 08d09f70cb46b..479fb20650b18 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java @@ -11,14 +11,12 @@ import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.ingest.SimulateDocumentBaseResult; import org.elasticsearch.action.ingest.SimulatePipelineAction; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; @@ -61,13 +59,13 @@ import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.Before; -import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasItem; @@ -541,11 +539,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { }}] }""", pipeline); PlainActionFuture simulatePipelineListener = new PlainActionFuture<>(); - client().execute( - SimulatePipelineAction.INSTANCE, - new SimulatePipelineRequest(new BytesArray(simulateSource.getBytes(StandardCharsets.UTF_8)), XContentType.JSON), - simulatePipelineListener - ); + client().execute(SimulatePipelineAction.INSTANCE, jsonSimulatePipelineRequest(simulateSource), simulatePipelineListener); assertThat(simulatePipelineListener.actionGet().getResults(), is(not(empty()))); @@ -575,7 +569,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { // Simulating the pipeline should fail SimulateDocumentBaseResult simulateResponse = (SimulateDocumentBaseResult) client().execute( SimulatePipelineAction.INSTANCE, - new SimulatePipelineRequest(new BytesArray(simulateSource.getBytes(StandardCharsets.UTF_8)), XContentType.JSON) + jsonSimulatePipelineRequest(simulateSource) ).actionGet().getResults().get(0); assertThat(simulateResponse.getFailure(), is(not(nullValue()))); assertThat((simulateResponse.getFailure()).getCause(), is(instanceOf(ElasticsearchSecurityException.class))); @@ -588,11 +582,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { putJsonPipeline("test_infer_license_pipeline", pipeline); PlainActionFuture simulatePipelineListenerNewLicense = new PlainActionFuture<>(); - client().execute( - SimulatePipelineAction.INSTANCE, - new SimulatePipelineRequest(new BytesArray(simulateSource.getBytes(StandardCharsets.UTF_8)), XContentType.JSON), - simulatePipelineListenerNewLicense - ); + client().execute(SimulatePipelineAction.INSTANCE, jsonSimulatePipelineRequest(simulateSource), simulatePipelineListenerNewLicense); assertThat(simulatePipelineListenerNewLicense.actionGet().getResults(), is(not(empty()))); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 36237d2705205..60f00da195974 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ 
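The core of the change is that the transport request now is a reference holder for the REST body: SimulatePipelineRequest keeps a ReleasableBytesReference and forwards the RefCounted methods to it. Reduced to its essentials (hypothetical minimal class, not the real request), the pattern looks like:

    import org.elasticsearch.common.bytes.ReleasableBytesReference;

    import java.util.Objects;

    // Hypothetical sketch: the holder's ref-count is the body's ref-count, so the
    // last party to release the request also releases the underlying REST body.
    class BodyBackedRequest {
        private final ReleasableBytesReference source;

        BodyBackedRequest(ReleasableBytesReference source) {
            this.source = Objects.requireNonNull(source);
        }

        public void incRef() {
            source.incRef();
        }

        public boolean tryIncRef() {
            return source.tryIncRef();
        }

        public boolean decRef() {
            return source.decRef();
        }

        public boolean hasReferences() {
            return source.hasReferences();
        }
    }

With that in place, the REST layer no longer needs ActionListener.withRef to pin the body for the whole call, which is exactly what the RestSimulatePipelineAction hunk above deletes.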
From c11e3c22991d39a95b71e992024d80d8eb677419 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 27 Nov 2024 08:18:54 +0000
Subject: [PATCH 273/386] Log shard completed snapshot message at TRACE
 (#117569)

This message is on the happy path, no need to log it at DEBUG.

Relates ES-8773
---
 .../org/elasticsearch/snapshots/SnapshotShardsService.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
index 7b2066f243771..234c0239a68ce 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
@@ -425,9 +425,9 @@ public void onResponse(ShardSnapshotResult shardSnapshotResult) {
                 final ShardGeneration newGeneration = shardSnapshotResult.getGeneration();
                 assert newGeneration != null;
                 assert newGeneration.equals(snapshotStatus.generation());
-                if (logger.isDebugEnabled()) {
+                if (logger.isTraceEnabled()) {
                     final IndexShardSnapshotStatus.Copy lastSnapshotStatus = snapshotStatus.asCopy();
-                    logger.debug(
+                    logger.trace(
                         "[{}][{}] completed snapshot to [{}] with status [{}] at generation [{}]",
                         shardId,
                         snapshot,
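Worth noting in passing: the hunk keeps the isTraceEnabled() guard around the call, because building the IndexShardSnapshotStatus.Copy is not free. A standalone sketch of that guard pattern (hypothetical class; org.apache.logging.log4j is the logging API Elasticsearch builds on):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    class GuardedTraceDemo {
        private static final Logger logger = LogManager.getLogger(GuardedTraceDemo.class);

        static void logCompletion() {
            if (logger.isTraceEnabled()) {
                // the expensive summary (asCopy() in the real code) is built only when TRACE is on
                String status = buildExpensiveStatusSummary();
                logger.trace("completed snapshot with status [{}]", status);
            }
        }

        private static String buildExpensiveStatusSummary() {
            return "stage=DONE, files=123"; // placeholder standing in for real work
        }
    }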
From 04dd9c22dae13e7a5ab67e8c3ea4b8228784f21a Mon Sep 17 00:00:00 2001
From: Iraklis Psaroudakis
Date: Wed, 27 Nov 2024 12:10:22 +0200
Subject: [PATCH 274/386] Make fast refresh ineffective for search routing
 (#117455)

Re-introduction of ES PR #114619.
Now, fast refresh indices route searches/gets to search shards in
stateless. Thus, this PR removes unnecessary code and simplifies some
things.

Relates ES-9563
---
 ...ansportUnpromotableShardRefreshAction.java | 15 ---------
 .../action/get/TransportGetAction.java        | 12 +++----
 .../get/TransportShardMultiGetAction.java     | 13 +++-----
 .../cluster/routing/IndexRoutingTable.java    |  2 +-
 .../cluster/routing/OperationRouting.java     | 19 +----------
 .../cluster/routing/ShardRouting.java         |  3 +-
 .../routing/IndexRoutingTableTests.java       | 33 +++----------------
 7 files changed, 15 insertions(+), 82 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
index 4458c008babcd..6c24ec2d17604 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java
@@ -24,9 +24,6 @@

 import java.util.List;

-import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2;
-import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
-
 public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction<
     UnpromotableShardRefreshRequest,
     ActionResponse.Empty> {
@@ -76,18 +73,6 @@ protected void unpromotableShardOperation(
             return;
         }

-        // During an upgrade to FAST_REFRESH_RCO_2, we expect search shards to be first upgraded before the primary is upgraded. Thus,
-        // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already.
-        // Note that the fast refresh setting is final.
-        // TODO: remove assertion (ES-9563)
-        assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false
-            || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)
-            : "attempted to refresh a fast refresh search shard "
-                + shard
-                + " on transport version "
-                + transportService.getLocalNodeConnection().getTransportVersion()
-                + " (before FAST_REFRESH_RCO_2)";
-
         ActionListener.run(responseListener, listener -> {
             shard.waitForPrimaryTermAndGeneration(
                 request.getPrimaryTerm(),

diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
index fb4b3907d2bfd..a2c7c8664e81a 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java
@@ -28,9 +28,9 @@
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.core.TimeValue;
@@ -109,10 +109,7 @@ protected ShardIterator shards(ClusterState state, InternalRequest request) {
         if (iterator == null) {
             return null;
         }
-        return new PlainShardIterator(
-            iterator.shardId(),
-            iterator.getShardRoutings().stream().filter(shardRouting -> OperationRouting.canSearchShard(shardRouting, state)).toList()
-        );
+        return new PlainShardIterator(iterator.shardId(), iterator.getShardRoutings().stream().filter(ShardRouting::isSearchable).toList());
     }

     @Override
@@ -129,9 +126,8 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi
             handleGetOnUnpromotableShard(request, indexShard, listener);
             return;
         }
-        // TODO: adapt assertion to assert only that it is not stateless (ES-9563)
-        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh()
-            : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting";
+        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false
+            : "in Stateless a promotable to primary shard should not receive a TransportGetAction";
         if (request.realtime()) { // we are not tied to a refresh cycle here anyway
             asyncGet(request, shardId, listener);
         } else {

diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
index 93e1b18ec64c6..0fa770df8e4ef 100644
--- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
+++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
@@ -28,9 +28,9 @@
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.PlainShardIterator;
 import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.core.TimeValue;
@@ -113,10 +113,7 @@ protected ShardIterator shards(ClusterState state, InternalRequest request) {
         if (iterator == null) {
             return null;
         }
-        return new PlainShardIterator(
-            iterator.shardId(),
-            iterator.getShardRoutings().stream().filter(shardRouting -> OperationRouting.canSearchShard(shardRouting, state)).toList()
-        );
+        return new PlainShardIterator(iterator.shardId(), iterator.getShardRoutings().stream().filter(ShardRouting::isSearchable).toList());
     }

     @Override
@@ -128,10 +125,8 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId
             handleMultiGetOnUnpromotableShard(request, indexShard, listener);
             return;
         }
-        // TODO: adapt assertion to assert only that it is not stateless (ES-9563)
-        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh()
-            : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has "
-                + "the fast refresh setting";
+        assert DiscoveryNode.isStateless(clusterService.getSettings()) == false
+            : "in Stateless a promotable to primary shard should not receive a TransportShardMultiGetAction";
         if (request.realtime()) { // we are not tied to a refresh cycle here anyway
             asyncShardMultiGet(request, shardId, listener);
         } else {

diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
index 7cb0e457e36c7..bcacf21fcedbf 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
@@ -241,7 +241,7 @@ public boolean readyForSearch(ClusterState clusterState) {
             boolean found = false;
             for (int idx = 0; idx < shardRoutingTable.size(); idx++) {
                 ShardRouting shardRouting = shardRoutingTable.shard(idx);
-                if (shardRouting.active() && OperationRouting.canSearchShard(shardRouting, clusterState)) {
+                if (shardRouting.active() && shardRouting.isSearchable()) {
                     found = true;
                     break;
                 }

diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index 13fc874f52e9f..5e2dbf1c5df5d 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -32,9 +32,6 @@
 import java.util.Set;
 import java.util.stream.Collectors;

-import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2;
-import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
-
 public class OperationRouting {

     public static final Setting USE_ADAPTIVE_REPLICA_SELECTION_SETTING = Setting.boolSetting(
@@ -151,7 +148,7 @@ private static List statefulShardsThatHandleSearches(ShardIterator
     }

     private static List statelessShardsThatHandleSearches(ClusterState clusterState, ShardIterator iterator) {
-        return iterator.getShardRoutings().stream().filter(shardRouting -> canSearchShard(shardRouting, clusterState)).toList();
+        return iterator.getShardRoutings().stream().filter(ShardRouting::isSearchable).toList();
     }

     public static ShardIterator getShards(ClusterState clusterState, ShardId shardId) {
@@ -304,18 +301,4 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null
         IndexMetadata indexMetadata = indexMetadata(clusterState, index);
         return new ShardId(indexMetadata.getIndex(), IndexRouting.fromIndexMetadata(indexMetadata).getShard(id, routing));
     }
-
-    public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) {
-        // TODO: remove if and always return isSearchable (ES-9563)
-        if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) {
-            // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally.
-            if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)) {
-                return shardRouting.isSearchable();
-            } else {
-                return shardRouting.isPromotableToPrimary();
-            }
-        } else {
-            return shardRouting.isSearchable();
-        }
-    }
 }

diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
index 319786b558ddd..157d28e61057c 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
@@ -935,8 +935,7 @@ public boolean isPromotableToPrimary() {
     }

     /**
-     * Determine if role searchable. Consumers should prefer {@link OperationRouting#canSearchShard(ShardRouting, ClusterState)} to
-     * determine if a shard can be searched and {@link IndexRoutingTable#readyForSearch(ClusterState)} to determine if an index
+     * Determine if role searchable. Consumers should prefer {@link IndexRoutingTable#readyForSearch(ClusterState)} to determine if an index
      * is ready to be searched.
      */
     public boolean isSearchable() {

diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
index e5786b1b3449e..912326162e5c4 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java
@@ -9,7 +9,6 @@

 package org.elasticsearch.cluster.routing;

-import org.elasticsearch.TransportVersion;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
@@ -20,7 +19,6 @@

 import java.util.List;

-import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2;
 import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
@@ -29,21 +27,10 @@ public class IndexRoutingTableTests extends ESTestCase {

     public void testReadyForSearch() {
-        innerReadyForSearch(false, false);
-        innerReadyForSearch(false, true);
-        innerReadyForSearch(true, false);
-        innerReadyForSearch(true, true);
-    }
-
-    // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563)
-    private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) {
         Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID());
         ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS);
         when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn(
-            Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build()
-        );
-        when(clusterState.getMinTransportVersion()).thenReturn(
-            beforeFastRefreshRCO ? TransportVersion.fromId(FAST_REFRESH_RCO_2.id() - 1_00_0) : TransportVersion.current()
+            Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), randomBoolean()).build()
         );
         // 2 primaries that are search and index
         ShardId p1 = new ShardId(index, 0);
@@ -63,11 +50,7 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR
         shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY)));
         shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY)));
         indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 });
-        if (fastRefresh && beforeFastRefreshRCO) {
-            assertTrue(indexRoutingTable.readyForSearch(clusterState));
-        } else {
-            assertFalse(indexRoutingTable.readyForSearch(clusterState));
-        }
+        assertFalse(indexRoutingTable.readyForSearch(clusterState));

         // 2 unassigned primaries that are index only
         shardTable1 = new IndexShardRoutingTable(
@@ -99,11 +82,7 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR
             )
         );
         indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 });
-        if (fastRefresh && beforeFastRefreshRCO) {
-            assertTrue(indexRoutingTable.readyForSearch(clusterState));
-        } else {
-            assertFalse(indexRoutingTable.readyForSearch(clusterState));
-        }
+        assertFalse(indexRoutingTable.readyForSearch(clusterState));

         // 2 primaries that are index only with some replicas that are all available
         shardTable1 = new IndexShardRoutingTable(
@@ -143,11 +122,7 @@ private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshR
             )
         );
         indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 });
-        if (fastRefresh && beforeFastRefreshRCO) {
-            assertFalse(indexRoutingTable.readyForSearch(clusterState));
-        } else {
-            assertTrue(indexRoutingTable.readyForSearch(clusterState));
-        }
+        assertTrue(indexRoutingTable.readyForSearch(clusterState));

         // 2 primaries that are index only with at least 1 replica per primary that is available
         shardTable1 = new IndexShardRoutingTable(
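The test changes above all exercise one readiness rule, which after this patch has no fast-refresh special case: an index is ready for search iff every shard has at least one active copy whose role is searchable. A condensed sketch of that rule (hypothetical helper; ShardRouting.active() and isSearchable() are the real methods used in the hunks above):

    import org.elasticsearch.cluster.routing.ShardRouting;

    import java.util.List;

    // Hypothetical condensation of IndexRoutingTable#readyForSearch after this change:
    // fast-refresh indices are no longer routed to promotable (indexing) shard copies.
    class ReadyForSearchSketch {
        static boolean readyForSearch(List<List<ShardRouting>> shardTables) {
            for (List<ShardRouting> copies : shardTables) {
                if (copies.stream().noneMatch(s -> s.active() && s.isSearchable())) {
                    return false; // this shard has no active searchable copy
                }
            }
            return true;
        }
    }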
.../services/mistral/MistralService.java | 2 -- .../services/openai/OpenAiService.java | 2 -- .../ShardBulkInferenceActionFilterTests.java | 4 ++-- .../services/SenderServiceTests.java | 2 -- .../AlibabaCloudSearchServiceTests.java | 13 +------------ .../AmazonBedrockServiceTests.java | 2 -- .../AzureAiStudioServiceTests.java | 2 -- .../azureopenai/AzureOpenAiServiceTests.java | 2 -- .../services/cohere/CohereServiceTests.java | 3 --- .../elastic/ElasticInferenceServiceTests.java | 2 -- .../ElasticsearchInternalServiceTests.java | 8 -------- .../GoogleAiStudioServiceTests.java | 12 +----------- .../HuggingFaceElserServiceTests.java | 2 -- .../huggingface/HuggingFaceServiceTests.java | 3 --- .../ibmwatsonx/IbmWatsonxServiceTests.java | 12 +----------- .../services/mistral/MistralServiceTests.java | 2 -- .../services/openai/OpenAiServiceTests.java | 2 -- 39 files changed, 13 insertions(+), 146 deletions(-) create mode 100644 docs/changelog/117235.yaml delete mode 100644 server/src/main/java/org/elasticsearch/inference/ChunkingOptions.java diff --git a/docs/changelog/117235.yaml b/docs/changelog/117235.yaml new file mode 100644 index 0000000000000..dbf0b4cc18388 --- /dev/null +++ b/docs/changelog/117235.yaml @@ -0,0 +1,5 @@ +pr: 117235 +summary: "Remove `ChunkingOptions` parameter" +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/ChunkingOptions.java b/server/src/main/java/org/elasticsearch/inference/ChunkingOptions.java deleted file mode 100644 index 5953e2cb44ebf..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/ChunkingOptions.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.core.Nullable; - -public record ChunkingOptions(@Nullable Integer windowSize, @Nullable Integer span) { - - public boolean settingsArePresent() { - return windowSize != null || span != null; - } -} diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index c6e09f61befa0..4497254aad1f0 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -112,16 +112,11 @@ void infer( ); /** - * Chunk long text according to {@code chunkingOptions} or the - * model defaults if {@code chunkingOptions} contains unset - * values.
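For context on the hunks above and below: every call site this patch touches passed `new ChunkingOptions(null, null)` or `null`, so `settingsArePresent()` was always false and a per-request window/span was never applied — chunking remains driven by each model's own `ChunkingSettings`, whose imports the services keep. A minimal, compilable sketch of the signature change follows; `Object` and `long timeoutMs` are hypothetical placeholders standing in for the real Elasticsearch model, input-type, `TimeValue` and listener classes, not the actual API:

    import java.util.List;
    import java.util.Map;

    class ChunkedInferSketch {
        // The record this patch deletes, reproduced from the diff above:
        record ChunkingOptions(Integer windowSize, Integer span) {}

        interface Before { // pre-#117235 shape
            void chunkedInfer(Object model, String query, List<String> input, Map<String, Object> taskSettings,
                Object inputType, ChunkingOptions options, long timeoutMs, Object listener);
        }

        interface After { // post-#117235 shape
            void chunkedInfer(Object model, String query, List<String> input, Map<String, Object> taskSettings,
                Object inputType, long timeoutMs, Object listener);
        }

        static void demo(Before before, After after) {
            // Every removed call site passed empty options, so the argument carried no information...
            before.chunkedInfer(null, null, List.of("text"), Map.of(), null, new ChunkingOptions(null, null), 1000L, null);
            // ...and dropping the parameter changes no behaviour.
            after.chunkedInfer(null, null, List.of("text"), Map.of(), null, 1000L, null);
        }
    }
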
- * * @param model The model * @param query Inference query, mainly for re-ranking * @param input Inference input * @param taskSettings Settings in the request to override the model's defaults * @param inputType For search, ingest etc - * @param chunkingOptions The window and span options to apply * @param timeout The timeout for the request * @param listener Chunked Inference result listener */ @@ -131,7 +126,6 @@ void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 2ddc4f6c3e2f6..ae11a02d312e2 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -18,7 +18,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceExtension; @@ -140,7 +139,6 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java index 2075c1b1924bf..9320571572f0a 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java @@ -17,7 +17,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceExtension; @@ -128,7 +127,6 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index 3d6f0ce6eba05..fe0223cce0323 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ 
b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -17,7 +17,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceExtension; @@ -131,7 +130,6 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java index 595b92a6be66b..6d7983bc8cb53 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceExtension; @@ -160,7 +159,6 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index dd59230e575c4..d178e927aa65d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -30,7 +30,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.inference.InputType; @@ -337,16 +336,7 @@ private void onFinish() { } }; inferenceProvider.service() - .chunkedInfer( - inferenceProvider.model(), - null, - inputs, - Map.of(), - InputType.INGEST, - new ChunkingOptions(null, null), - TimeValue.MAX_VALUE, - completionListener - ); + .chunkedInfer(inferenceProvider.model(), null, inputs, Map.of(), InputType.INGEST, TimeValue.MAX_VALUE, completionListener); } private FieldInferenceResponseAccumulator ensureResponseAccumulatorSlot(int id) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java index b8a99227cf517..8e2dac1ef9db2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -12,7 +12,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; @@ -76,13 +75,12 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { init(); // a non-null query is not supported and is dropped by all providers - doChunkedInfer(model, new DocumentsOnlyInput(input), taskSettings, inputType, chunkingOptions, timeout, listener); + doChunkedInfer(model, new DocumentsOnlyInput(input), taskSettings, inputType, timeout, listener); } protected abstract void doInfer( @@ -99,7 +97,6 @@ protected abstract void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 6d77663f49ece..d7ac7caed7efc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -289,7 +288,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index a69b9d2c70405..48b3c3df03e11 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -17,7 +17,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import 
org.elasticsearch.inference.InferenceServiceConfiguration; @@ -114,7 +113,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java index eba7353f2b12e..b3d503de8e3eb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -220,7 +219,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index a2f8dc409585e..bba331fc0b5df 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -16,7 +16,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -107,7 +106,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 2f3a935cdf010..16c94dfa9ad94 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -261,7 +260,6 @@ protected void doChunkedInfer( DocumentsOnlyInput 
inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index cc67470686a02..b3d8b3b6efce3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -260,7 +259,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index e7ce5903163d4..1f08c06edaa91 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -16,7 +16,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -109,7 +108,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 6d124906d65bd..2ec3a9d629434 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceResults; @@ -676,11 +675,10 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { - chunkedInfer(model, 
null, input, taskSettings, inputType, chunkingOptions, timeout, listener); + chunkedInfer(model, null, input, taskSettings, inputType, timeout, listener); } @Override @@ -690,7 +688,6 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index 1c01ebbe2c0e4..57a8a66a3f3a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -315,7 +314,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 204593464a4ad..857d475499aae 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -213,7 +212,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index eede14a975234..51cca72f26054 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import 
org.elasticsearch.inference.InferenceServiceConfiguration; @@ -116,7 +115,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index a2e22e24172cf..75920efa251f2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -16,7 +16,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -88,7 +87,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index 592900d117b39..ea263fb77a2da 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -283,7 +282,6 @@ protected void doChunkedInfer( DocumentsOnlyInput input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 2e810c357f8bd..fe0edb851902b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -95,7 +94,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map 
taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 81ab87a461696..20ff1c617d21f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -264,7 +263,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index 770e6e3cb9cf4..2416aeb62ff33 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -291,7 +291,7 @@ private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool StaticModel model = (StaticModel) invocationOnMock.getArguments()[0]; List inputs = (List) invocationOnMock.getArguments()[2]; ActionListener> listener = (ActionListener< - List>) invocationOnMock.getArguments()[7]; + List>) invocationOnMock.getArguments()[6]; Runnable runnable = () -> { List results = new ArrayList<>(); for (String input : inputs) { @@ -310,7 +310,7 @@ private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool } return null; }; - doAnswer(chunkedInferAnswer).when(inferenceService).chunkedInfer(any(), any(), any(), any(), any(), any(), any(), any()); + doAnswer(chunkedInferAnswer).when(inferenceService).chunkedInfer(any(), any(), any(), any(), any(), any(), any()); Answer modelAnswer = invocationOnMock -> { String inferenceId = (String) invocationOnMock.getArguments()[0]; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index d8402c28cec87..47a96bf78dda1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySettingsConfiguration; import 
org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -126,7 +125,6 @@ protected void doChunkedInfer( DocumentsOnlyInput inputs, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, TimeValue timeout, ActionListener> listener ) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index b6d29ccab9a49..a154ded395822 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -401,7 +400,6 @@ public void testChunkedInfer_InvalidTaskType() throws IOException { List.of("foo", "bar"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); @@ -420,16 +418,7 @@ private void testChunkedInfer(TaskType taskType, ChunkingSettings chunkingSettin var model = createModelForTaskType(taskType, chunkingSettings); PlainActionFuture> listener = new PlainActionFuture<>(); - service.chunkedInfer( - model, - null, - input, - new HashMap<>(), - InputType.INGEST, - new ChunkingOptions(null, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - listener - ); + service.chunkedInfer(model, null, input, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, listener); var results = listener.actionGet(TIMEOUT); assertThat(results, instanceOf(List.class)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index e583e50075ee7..35b5642b7a60c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -1559,7 +1558,6 @@ private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOExcep List.of("abc", "xyz"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 76ea7a5bde5ca..8636ba8890e87 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -1194,7 +1193,6 @@ private void testChunkedInfer(AzureAiStudioEmbeddingsModel model) throws IOExcep List.of("foo", "bar"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index dc1970e26a3f8..b0c590e237a44 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -1343,7 +1342,6 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti List.of("foo", "bar"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 30f3b344a268c..259a32aa6254d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -1451,7 +1450,6 @@ private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException { List.of("foo", "bar"), new HashMap<>(), InputType.UNSPECIFIED, - new 
ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); @@ -1543,7 +1541,6 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { List.of("foo", "bar"), new HashMap<>(), InputType.UNSPECIFIED, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 3767ac496d183..d3101099d06c7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.EmptySecretSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -461,7 +460,6 @@ public void testChunkedInfer_PassesThrough() throws IOException { List.of("input text"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 9a4d0dda82238..306509ea60cfc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceResults; @@ -902,7 +901,6 @@ private void testChunkInfer_e5(ChunkingSettings chunkingSettings) throws Interru List.of("foo", "bar"), Map.of(), InputType.SEARCH, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, latchedListener ); @@ -973,7 +971,6 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int List.of("foo", "bar"), Map.of(), InputType.SEARCH, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, latchedListener ); @@ -1044,7 +1041,6 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte List.of("foo", "bar"), Map.of(), InputType.SEARCH, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, latchedListener ); @@ -1090,7 +1086,6 @@ public void testChunkInferSetsTokenization() { List.of("foo", "bar"), Map.of(), InputType.SEARCH, - null, InferenceAction.Request.DEFAULT_TIMEOUT, ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) ); @@ -1102,7 
+1097,6 @@ public void testChunkInferSetsTokenization() { List.of("foo", "bar"), Map.of(), InputType.SEARCH, - new ChunkingOptions(256, null), InferenceAction.Request.DEFAULT_TIMEOUT, ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) ); @@ -1155,7 +1149,6 @@ public void testChunkInfer_FailsBatch() throws InterruptedException { List.of("foo", "bar", "baz"), Map.of(), InputType.SEARCH, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, latchedListener ); @@ -1228,7 +1221,6 @@ public void testChunkingLargeDocument() throws InterruptedException { List.of(input), Map.of(), InputType.SEARCH, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, latchedListener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index bc8020d8d88fe..375c583cce13a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -870,16 +869,7 @@ private void testChunkedInfer(String modelId, String apiKey, GoogleAiStudioEmbed webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); PlainActionFuture> listener = new PlainActionFuture<>(); - service.chunkedInfer( - model, - null, - input, - new HashMap<>(), - InputType.INGEST, - new ChunkingOptions(null, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - listener - ); + service.chunkedInfer(model, null, input, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, listener); var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java index df82f1ed393bf..8f0e481213cdf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InputType; import org.elasticsearch.test.ESTestCase; @@ -98,7 +97,6 @@ public void testChunkedInfer_CallsInfer_Elser_ConvertsFloatResponse() throws IOE List.of("abc"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff 
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index 0ff4bd805ea36..022cbecd1ea6a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -782,7 +781,6 @@ public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() th List.of("abc"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); @@ -838,7 +836,6 @@ public void testChunkedInfer() throws IOException { List.of("abc"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index 1261e3834437b..5aa826f1d80fe 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; @@ -686,16 +685,7 @@ private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws getUrl(webServer) ); PlainActionFuture> listener = new PlainActionFuture<>(); - service.chunkedInfer( - model, - null, - input, - new HashMap<>(), - InputType.INGEST, - new ChunkingOptions(null, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - listener - ); + service.chunkedInfer(model, null, input, new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, listener); var results = listener.actionGet(TIMEOUT); assertThat(results, hasSize(2)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index 71e9eac9a6635..73bf03fd43ec5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.core.Nullable; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -673,7 +672,6 @@ public void testChunkedInfer(MistralEmbeddingsModel model) throws IOException { List.of("abc", "def"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 509a1f8a3d010..76b5d6fee2c59 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; -import org.elasticsearch.inference.ChunkingOptions; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -1558,7 +1557,6 @@ private void testChunkedInfer(OpenAiEmbeddingsModel model) throws IOException { List.of("foo", "bar"), new HashMap<>(), InputType.INGEST, - new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, listener ); From 9799d0082b5ca39f598dd71beda2c7823f88444b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Wed, 27 Nov 2024 11:31:02 +0100 Subject: [PATCH 276/386] [Entitlements] Add support for instrumenting constructors (#117332) --- .../impl/InstrumentationServiceImpl.java | 9 +- .../impl/InstrumenterImpl.java | 11 +- .../impl/InstrumentationServiceImplTests.java | 56 ++++++++++ .../impl/InstrumenterTests.java | 103 ++++++++++++++++-- .../bridge/EntitlementChecker.java | 14 +++ .../EntitlementInitialization.java | 4 - .../api/ElasticsearchEntitlementChecker.java | 34 ++++++ .../runtime/policy/FlagEntitlementType.java | 3 +- .../runtime/policy/PolicyManager.java | 2 +- .../test/entitlements/EntitlementsIT.java | 7 ++ .../entitlements/EntitlementsCheckPlugin.java | 3 +- ...estEntitlementsCheckClassLoaderAction.java | 54 +++++++++ .../bootstrap/Elasticsearch.java | 4 +- 13 files changed, 281 insertions(+), 23 deletions(-) create mode 100644 qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/RestEntitlementsCheckClassLoaderAction.java diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index a3bbb611f3e68..16bd04e60c5e3 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -91,15 +91,18 @@ static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] ch String.format( Locale.ROOT, "Checker 
method %s has incorrect name format. " - + "It should be either check$$methodName (instance) or check$package_ClassName$methodName (static)", + + "It should be either check$$methodName (instance), check$package_ClassName$methodName (static) or " + + "check$package_ClassName$ (ctor)", checkerMethodName ) ); } - // No "className" (check$$methodName) -> method is static, and we'll get the class from the actual typed argument + // No "className" (check$$methodName) -> method is instance, and we'll get the class from the actual typed argument final boolean targetMethodIsStatic = classNameStartIndex + 1 != classNameEndIndex; - final String targetMethodName = checkerMethodName.substring(classNameEndIndex + 1); + // No "methodName" (check$package_ClassName$) -> method is ctor + final boolean targetMethodIsCtor = classNameEndIndex + 1 == checkerMethodName.length(); + final String targetMethodName = targetMethodIsCtor ? "<init>" : checkerMethodName.substring(classNameEndIndex + 1); final String targetClassName; final List<String> targetParameterTypes; diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index dc20b16400f3d..4d762dc997383 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -154,11 +154,12 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str var mv = super.visitMethod(access, name, descriptor, signature, exceptions); if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; + boolean isCtor = "<init>".equals(name); var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = instrumentationMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); - return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, descriptor, instrumentationMethod); + return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); } else { // LOGGER.trace("Will not instrument method {}", key); } @@ -187,6 +188,7 @@ private void addClassAnnotationIfNeeded() { class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; + private final boolean instrumentedMethodIsCtor; private final String instrumentedMethodDescriptor; private final CheckerMethod instrumentationMethod; private boolean hasCallerSensitiveAnnotation = false; @@ -195,11 +197,13 @@ class EntitlementMethodVisitor extends MethodVisitor { int api, MethodVisitor methodVisitor, boolean instrumentedMethodIsStatic, + boolean instrumentedMethodIsCtor, String instrumentedMethodDescriptor, CheckerMethod instrumentationMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; + this.instrumentedMethodIsCtor = instrumentedMethodIsCtor; this.instrumentedMethodDescriptor = instrumentedMethodDescriptor; this.instrumentationMethod = instrumentationMethod; } @@ -260,14 +264,15 @@ private void pushCallerClass() { private void forwardIncomingArguments() { int localVarIndex = 0; - if (instrumentedMethodIsStatic == false) { + if (instrumentedMethodIsCtor) { + localVarIndex++; + } else if (instrumentedMethodIsStatic == false) { mv.visitVarInsn(Opcodes.ALOAD, localVarIndex++); } for (Type type : Type.getArgumentTypes(instrumentedMethodDescriptor)) { mv.visitVarInsn(type.getOpcode(Opcodes.ILOAD), localVarIndex); localVarIndex += type.getSize(); } - } private void invokeInstrumentationMethod() {
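Before the test changes that follow, the three checker-method name forms can be made concrete. Below is a hedged, runnable re-implementation of the convention parsed above — deliberately simplified, performing none of the real `parseCheckerMethodSignature` validation, and stubbing out the class of an instance check (which the real code resolves from the first typed argument):

    import java.util.Locale;

    public class CheckerNameSketch {
        static String describe(String checkerMethodName) {
            int classStart = checkerMethodName.indexOf('$');
            int classEnd = checkerMethodName.indexOf('$', classStart + 1);
            boolean isStatic = classStart + 1 != classEnd;               // check$$m has an empty class part -> instance
            boolean isCtor = classEnd + 1 == checkerMethodName.length(); // nothing after the last '$' -> constructor
            String method = isCtor ? "<init>" : checkerMethodName.substring(classEnd + 1);
            String className = isStatic ? checkerMethodName.substring(classStart + 1, classEnd).replace('_', '/') : "<from first arg>";
            return String.format(Locale.ROOT, "%s %s#%s", isCtor ? "ctor" : isStatic ? "static" : "instance", className, method);
        }

        public static void main(String[] args) {
            System.out.println(describe("check$$methodName"));                        // instance method
            System.out.println(describe("check$org_example_TestClass$staticMethod")); // static method
            System.out.println(describe("check$org_example_TestClass$"));             // constructor
        }
    }

The constructor case also motivates the `forwardIncomingArguments` change just above: for a constructor the injected prologue skips local slot 0 instead of loading it, because until the `super()` call completes that slot holds an uninitialized `this` which the bytecode verifier does not allow to escape — only the declared constructor arguments are forwarded to the checker.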
diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java index c0ff5d59d3c72..5eee0bf27d1df 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -45,6 +45,12 @@ interface TestCheckerOverloads { void check$org_example_TestTargetClass$staticMethodWithOverload(Class<?> clazz, int x, String y); } + interface TestCheckerCtors { + void check$org_example_TestTargetClass$(Class<?> clazz); + + void check$org_example_TestTargetClass$(Class<?> clazz, int x, String y); + } + public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException { Map<MethodKey, CheckerMethod> methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); @@ -142,6 +148,38 @@ public void testInstrumentationTargetLookupWithOverloads() throws IOException, C ); } + public void testInstrumentationTargetLookupWithCtors() throws IOException, ClassNotFoundException { + Map<MethodKey, CheckerMethod> methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerCtors.class.getName()); + + assertThat(methodsMap, aMapWithSize(2)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "<init>", List.of("I", "java/lang/String"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerCtors", + "check$org_example_TestTargetClass$", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "<init>", List.of())), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerCtors", + "check$org_example_TestTargetClass$", + List.of("Ljava/lang/Class;") + ) + ) + ) + ); + } + public void testParseCheckerMethodSignatureStaticMethod() { var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( "check$org_example_TestClass$staticMethod", @@ -169,6 +207,24 @@ public void testParseCheckerMethodSignatureStaticMethodInnerClass() { assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass$InnerClass", "staticMethod", List.of()))); } + public void testParseCheckerMethodSignatureCtor() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "<init>", List.of()))); + } + + public void testParseCheckerMethodSignatureCtorWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$", + new Type[] { Type.getType(Class.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "<init>", 
List.of("I", "java/lang/String")))); + } + public void testParseCheckerMethodSignatureIncorrectName() { var exception = assertThrows( IllegalArgumentException.class, diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index e3f5539999be5..40f0162d2eaa2 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -23,12 +23,15 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.net.URL; +import java.net.URLStreamHandlerFactory; import java.util.Arrays; +import java.util.List; import java.util.Map; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; import static org.objectweb.asm.Opcodes.INVOKESTATIC; @@ -72,6 +75,11 @@ public interface Testable { * They must not throw {@link TestException}. */ public static class ClassToInstrument implements Testable { + + public ClassToInstrument() {} + + public ClassToInstrument(int arg) {} + public static void systemExit(int status) { assertEquals(123, status); } @@ -91,12 +99,20 @@ public static void someStaticMethod(int arg, String anotherArg) {} static final class TestException extends RuntimeException {} + /** + * Interface to test specific, "synthetic" cases (e.g. overloaded methods, overloaded constructors, etc.) 
that + * may be not present/may be difficult to find or not clear in the production EntitlementChecker interface + */ public interface MockEntitlementChecker extends EntitlementChecker { void checkSomeStaticMethod(Class clazz, int arg); void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + + void checkCtor(Class clazz); + + void checkCtor(Class clazz, int arg); } /** @@ -118,6 +134,9 @@ public static class TestEntitlementChecker implements MockEntitlementChecker { int checkSomeStaticMethodIntStringCallCount = 0; int checkSomeInstanceMethodCallCount = 0; + int checkCtorCallCount = 0; + int checkCtorIntCallCount = 0; + @Override public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; @@ -126,6 +145,27 @@ public static class TestEntitlementChecker implements MockEntitlementChecker { throwIfActive(); } + @Override + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) {} + + @Override + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) {} + + @Override + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) {} + + @Override + public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) {} + + @Override + public void check$java_net_URLClassLoader$( + Class callerClass, + String name, + URL[] urls, + ClassLoader parent, + URLStreamHandlerFactory factory + ) {} + private void throwIfActive() { if (isActive) { throw new TestException(); @@ -161,6 +201,21 @@ public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg assertEquals("def", anotherArg); throwIfActive(); } + + @Override + public void checkCtor(Class callerClass) { + checkCtorCallCount++; + assertSame(InstrumenterTests.class, callerClass); + throwIfActive(); + } + + @Override + public void checkCtor(Class callerClass, int arg) { + checkCtorIntCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } } public void testClassIsInstrumented() throws Exception { @@ -225,7 +280,7 @@ public void testClassIsNotInstrumentedTwice() throws Exception { getTestEntitlementChecker().checkSystemExitCallCount = 0; assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(1)); + assertEquals(1, getTestEntitlementChecker().checkSystemExitCallCount); } public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { @@ -259,10 +314,10 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { getTestEntitlementChecker().checkSystemExitCallCount = 0; assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(1)); + assertEquals(1, getTestEntitlementChecker().checkSystemExitCallCount); assertThrows(TestException.class, () -> callStaticMethod(newClass, "anotherSystemExit", 123)); - assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); + assertEquals(2, getTestEntitlementChecker().checkSystemExitCallCount); } public void testInstrumenterWorksWithOverloads() throws Exception { @@ -294,8 +349,8 @@ public void testInstrumenterWorksWithOverloads() throws Exception { assertThrows(TestException.class, () -> 
callStaticMethod(newClass, "someStaticMethod", 123));
         assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc"));

-        assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntCallCount, is(1));
-        assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount, is(1));
+        assertEquals(1, getTestEntitlementChecker().checkSomeStaticMethodIntCallCount);
+        assertEquals(1, getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount);
     }

     public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception {
@@ -327,7 +382,41 @@ public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Except
         testTargetClass.someMethod(123);
         assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def"));

-        assertThat(getTestEntitlementChecker().checkSomeInstanceMethodCallCount, is(1));
+        assertEquals(1, getTestEntitlementChecker().checkSomeInstanceMethodCallCount);
+    }
+
+    public void testInstrumenterWorksWithConstructors() throws Exception {
+        var classToInstrument = ClassToInstrument.class;
+
+        Map<MethodKey, CheckerMethod> methods = Map.of(
+            new MethodKey(classToInstrument.getName().replace('.', '/'), "<init>", List.of()),
+            getCheckerMethod(MockEntitlementChecker.class, "checkCtor", Class.class),
+            new MethodKey(classToInstrument.getName().replace('.', '/'), "<init>", List.of("I")),
+            getCheckerMethod(MockEntitlementChecker.class, "checkCtor", Class.class, int.class)
+        );
+
+        var instrumenter = createInstrumenter(methods);
+
+        byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes();
+
+        if (logger.isTraceEnabled()) {
+            logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode));
+        }
+
+        Class<?> newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes(
+            classToInstrument.getName() + "_NEW",
+            newBytecode
+        );
+
+        getTestEntitlementChecker().isActive = true;
+
+        var ex = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor().newInstance());
+        assertThat(ex.getCause(), instanceOf(TestException.class));
+        var ex2 = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor(int.class).newInstance(123));
+        assertThat(ex2.getCause(), instanceOf(TestException.class));
+
+        assertEquals(1, getTestEntitlementChecker().checkCtorCallCount);
+        assertEquals(1, getTestEntitlementChecker().checkCtorIntCallCount);
     }

     /** This test doesn't replace classToInstrument in-place but instead loads a separate
diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java
index 167c93c90df5c..ad0f14bcf4478 100644
--- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java
+++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java
@@ -9,6 +9,20 @@

 package org.elasticsearch.entitlement.bridge;

+import java.net.URL;
+import java.net.URLStreamHandlerFactory;
+
 public interface EntitlementChecker {
     void check$java_lang_System$exit(Class<?> callerClass, int status);
+
+    // URLClassLoader ctor
+    void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls);
+
+    void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent);
+
+    void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory);
+
+    void check$java_net_URLClassLoader$(Class<?>
callerClass, String name, URL[] urls, ClassLoader parent);
+
+    void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory);
 }
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
index ca57e7b255bca..1f87e067e04f1 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java
@@ -169,10 +169,6 @@ private static ElasticsearchEntitlementChecker initChecker() throws IOException
         }
     }

-    private static String internalName(Class<?> c) {
-        return c.getName().replace('.', '/');
-    }
-
     private static final InstrumentationService INSTRUMENTER_FACTORY = new ProviderLocator<>(
         "entitlement",
         InstrumentationService.class,
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
index 790416ca5659a..28a080470c043 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java
@@ -13,6 +13,9 @@
 import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType;
 import org.elasticsearch.entitlement.runtime.policy.PolicyManager;

+import java.net.URL;
+import java.net.URLStreamHandlerFactory;
+
 /**
  * Implementation of the {@link EntitlementChecker} interface, providing additional
  * API methods for managing the checks.
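// A note on the check$ naming convention these diffs rely on: a checker method named
// check$fully_qualified_ClassName$methodName is mapped back to its instrumentation target,
// with package dots encoded as underscores, and an empty method segment after the trailing
// '$' selecting the target's constructor (the JVM-internal <init> name). The sketch below is
// a rough, hypothetical rendering of that parse rule, assuming ASM's Type plus the MethodKey
// record used in the tests above (imports: org.objectweb.asm.Type, java.util.Arrays,
// java.util.List); the production logic is InstrumentationServiceImpl.parseCheckerMethodSignature,
// not this code.
static MethodKey parse(String checkerMethodName, Type[] checkerArgs) {
    int classStart = checkerMethodName.indexOf('$') + 1;
    int methodStart = checkerMethodName.lastIndexOf('$') + 1;
    String className = checkerMethodName.substring(classStart, methodStart - 1).replace('_', '/');
    // an empty trailing segment denotes a constructor
    String methodName = methodStart == checkerMethodName.length() ? "<init>" : checkerMethodName.substring(methodStart);
    // drop the leading caller Class<?> argument; primitives keep their descriptor ("I"),
    // reference types their internal name ("java/lang/String")
    List<String> params = Arrays.stream(checkerArgs)
        .skip(1)
        .map(t -> t.getSort() < Type.ARRAY ? t.getDescriptor() : t.getInternalName())
        .toList();
    return new MethodKey(className, methodName, params);
}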
@@ -29,4 +32,35 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) {
     public void check$java_lang_System$exit(Class<?> callerClass, int status) {
         policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT);
     }
+
+    @Override
+    public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls) {
+        policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER);
+    }
+
+    @Override
+    public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent) {
+        policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER);
+    }
+
+    @Override
+    public void check$java_net_URLClassLoader$(Class<?> callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) {
+        policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER);
+    }
+
+    @Override
+    public void check$java_net_URLClassLoader$(Class<?> callerClass, String name, URL[] urls, ClassLoader parent) {
+        policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER);
+    }
+
+    @Override
+    public void check$java_net_URLClassLoader$(
+        Class<?> callerClass,
+        String name,
+        URL[] urls,
+        ClassLoader parent,
+        URLStreamHandlerFactory factory
+    ) {
+        policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.CREATE_CLASSLOADER);
+    }
 }
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java
index 60490baf41a10..d40235ee12166 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java
@@ -10,5 +10,6 @@
 package org.elasticsearch.entitlement.runtime.policy;

 public enum FlagEntitlementType {
-    SYSTEM_EXIT;
+    SYSTEM_EXIT,
+    CREATE_CLASSLOADER;
 }
diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java
index c06dc09758de5..b3fb5b75a1d5a 100644
--- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java
+++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java
@@ -66,7 +66,7 @@ public void checkFlagEntitlement(Class<?> callerClass, FlagEntitlementType type)
         // TODO: this will be checked using policies
         if (requestingModule.isNamed()
             && requestingModule.getName().equals("org.elasticsearch.server")
-            && type == FlagEntitlementType.SYSTEM_EXIT) {
+            && (type == FlagEntitlementType.SYSTEM_EXIT || type == FlagEntitlementType.CREATE_CLASSLOADER)) {
             logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type);
             return;
         }
diff --git a/qa/entitlements/src/javaRestTest/java/org/elasticsearch/test/entitlements/EntitlementsIT.java b/qa/entitlements/src/javaRestTest/java/org/elasticsearch/test/entitlements/EntitlementsIT.java
index 8b3629527f918..f8bae10492ba8 100644
--- a/qa/entitlements/src/javaRestTest/java/org/elasticsearch/test/entitlements/EntitlementsIT.java
+++ b/qa/entitlements/src/javaRestTest/java/org/elasticsearch/test/entitlements/EntitlementsIT.java
@@ -39,4 +39,11 @@ public void testCheckSystemExit() {
         );
         assertThat(exception.getMessage(), containsString("not_entitled_exception"));
     }
+
+    public void testCheckCreateURLClassLoader() {
+        var exception = expectThrows(IOException.class, () -> {
+            client().performRequest(new Request("GET", "/_entitlement/_check_create_url_classloader"));
+        });
+        assertThat(exception.getMessage(), containsString("not_entitled_exception"));
+    }
 }
diff --git a/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/EntitlementsCheckPlugin.java b/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/EntitlementsCheckPlugin.java
index f3821c065eceb..94ad54c8c8ba8 100644
--- a/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/EntitlementsCheckPlugin.java
+++ b/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/EntitlementsCheckPlugin.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;

-import java.util.Collections;
 import java.util.List;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
@@ -42,6 +41,6 @@ public List<RestHandler> getRestHandlers(
         final Supplier<DiscoveryNodes> nodesInCluster,
         Predicate<NodeFeature> clusterSupportsFeature
     ) {
-        return Collections.singletonList(new RestEntitlementsCheckSystemExitAction());
+        return List.of(new RestEntitlementsCheckSystemExitAction(), new RestEntitlementsCheckClassLoaderAction());
     }
 }
diff --git a/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/RestEntitlementsCheckClassLoaderAction.java b/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/RestEntitlementsCheckClassLoaderAction.java
new file mode 100644
index 0000000000000..0b5ca28739ed0
--- /dev/null
+++ b/qa/entitlements/src/main/java/org/elasticsearch/test/entitlements/RestEntitlementsCheckClassLoaderAction.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.test.entitlements;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestRequest;
+
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.GET;
+
+public class RestEntitlementsCheckClassLoaderAction extends BaseRestHandler {
+
+    private static final Logger logger = LogManager.getLogger(RestEntitlementsCheckClassLoaderAction.class);
+
+    RestEntitlementsCheckClassLoaderAction() {}
+
+    @Override
+    public List<Route> routes() {
+        return List.of(new Route(GET, "/_entitlement/_check_create_url_classloader"));
+    }
+
+    @Override
+    public String getName() {
+        return "check_classloader_action";
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
+        logger.info("RestEntitlementsCheckClassLoaderAction rest handler [{}]", request.path());
+        if (request.path().equals("/_entitlement/_check_create_url_classloader")) {
+            return channel -> {
+                logger.info("Calling new URLClassLoader");
+                try (var classLoader = new URLClassLoader("test", new URL[0], this.getClass().getClassLoader())) {
+                    logger.info("Created URLClassLoader [{}]", classLoader.getName());
+                }
+            };
+        }
+
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index 95e5b00a2805f..b7774259bf289 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -210,7 +210,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
         bootstrap.setPluginsLoader(pluginsLoader);

         if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) {
-            logger.info("Bootstrapping Entitlements");
+            LogManager.getLogger(Elasticsearch.class).info("Bootstrapping Entitlements");

             List<Tuple<Path, Boolean>> pluginData = new ArrayList<>();
             Set<PluginBundle> moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile());
@@ -225,7 +225,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException {
             EntitlementBootstrap.bootstrap(pluginData, callerClass -> null);
         } else {
             // install SM after natives, shutdown hooks, etc.
- logger.info("Bootstrapping java SecurityManager"); + LogManager.getLogger(Elasticsearch.class).info("Bootstrapping java SecurityManager"); org.elasticsearch.bootstrap.Security.configure( nodeEnv, SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()), From 9e610894143483ef234d447c420f08ccae73648d Mon Sep 17 00:00:00 2001 From: George Wallace Date: Wed, 27 Nov 2024 03:39:07 -0700 Subject: [PATCH 277/386] [DOCS] : swap allocation sections (#116518) Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- .../inference/service-elser.asciidoc | 61 ++++++++++--------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 262bdfbca002f..c1cc23c8c9adb 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -102,10 +102,39 @@ If `adaptive_allocations` is enabled, do not set this value, because it's automa Sets the number of threads used by each model allocation during inference. This generally increases the speed per inference request. The inference process is a compute-bound process; `threads_per_allocations` must not exceed the number of available allocated processors per node. Must be a power of 2. Max allowed value is 32. +[discrete] +[[inference-example-elser-adaptive-allocation]] +==== ELSER service example with adaptive allocations + +When adaptive allocations are enabled, the number of allocations of the model is set automatically based on the current load. + +NOTE: For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. +To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. + +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type and configure adaptive allocations. + +The request below will automatically download the ELSER model if it isn't already downloaded and then deploy the model. + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-model +{ + "service": "elser", + "service_settings": { + "adaptive_allocations": { + "enabled": true, + "min_number_of_allocations": 3, + "max_number_of_allocations": 10 + }, + "num_threads": 1 + } +} +------------------------------------------------------------ +// TEST[skip:TBD] [discrete] [[inference-example-elser]] -==== ELSER service example +==== ELSER service example without adaptive allocations The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info. @@ -151,32 +180,4 @@ You might see a 502 bad gateway error in the response when using the {kib} Conso This error usually just reflects a timeout, while the model downloads in the background. You can check the download progress in the {ml-app} UI. If using the Python client, you can set the `timeout` parameter to a higher value. 
-==== - -[discrete] -[[inference-example-elser-adaptive-allocation]] -==== Setting adaptive allocations for the ELSER service - -NOTE: For more information on how to optimize your ELSER endpoints, refer to {ml-docs}/ml-nlp-elser.html#elser-recommendations[the ELSER recommendations] section in the model documentation. -To learn more about model autoscaling, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] page. - -The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type and configure adaptive allocations. - -The request below will automatically download the ELSER model if it isn't already downloaded and then deploy the model. - -[source,console] ------------------------------------------------------------- -PUT _inference/sparse_embedding/my-elser-model -{ - "service": "elser", - "service_settings": { - "adaptive_allocations": { - "enabled": true, - "min_number_of_allocations": 3, - "max_number_of_allocations": 10 - }, - "num_threads": 1 - } -} ------------------------------------------------------------- -// TEST[skip:TBD] +==== \ No newline at end of file From 9946cea34dc711d6cc48fa49784e804f2421088d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 27 Nov 2024 11:52:23 +0100 Subject: [PATCH 278/386] Turn RankFeatureShardPhase into utility class (#117616) This class has no state, no need to pass instances of it around all its members can be static to simplify node construction and the code overall. --- .../elasticsearch/node/NodeConstruction.java | 1 - .../node/NodeServiceProvider.java | 3 --- .../org/elasticsearch/search/SearchModule.java | 5 ----- .../elasticsearch/search/SearchService.java | 7 ++----- .../rank/feature/RankFeatureShardPhase.java | 8 ++++---- .../rank/RankFeatureShardPhaseTests.java | 18 ++++++------------ .../snapshots/SnapshotResiliencyTests.java | 2 -- .../java/org/elasticsearch/node/MockNode.java | 4 ---- .../search/MockSearchService.java | 3 --- 9 files changed, 12 insertions(+), 39 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 2488ac894a612..795fe9e2771f0 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1099,7 +1099,6 @@ private void construct( threadPool, scriptService, bigArrays, - searchModule.getRankFeatureShardPhase(), searchModule.getFetchPhase(), responseCollectorService, circuitBreakerService, diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index 8f2dc4e532ae0..a49958c476416 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -35,7 +35,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; @@ -119,7 +118,6 @@ SearchService newSearchService( ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, - RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService 
responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -132,7 +130,6 @@ SearchService newSearchService( threadPool, scriptService, bigArrays, - rankFeatureShardPhase, fetchPhase, responseCollectorService, circuitBreakerService, diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index b8f50c6f9a62f..09e25350ad4fd 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -231,7 +231,6 @@ import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.RankShardResult; import org.elasticsearch.search.rank.feature.RankFeatureDoc; -import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.search.rank.feature.RankFeatureShardResult; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescorerBuilder; @@ -1299,10 +1298,6 @@ private void registerQuery(QuerySpec spec) { ); } - public RankFeatureShardPhase getRankFeatureShardPhase() { - return new RankFeatureShardPhase(); - } - public FetchPhase getFetchPhase() { return new FetchPhase(fetchSubPhases); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index a11c4013a9c9b..84bdc017ce970 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -286,7 +286,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final BigArrays bigArrays; private final FetchPhase fetchPhase; - private final RankFeatureShardPhase rankFeatureShardPhase; private volatile Executor searchExecutor; private volatile boolean enableQueryPhaseParallelCollection; @@ -325,7 +324,6 @@ public SearchService( ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, - RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -339,7 +337,6 @@ public SearchService( this.scriptService = scriptService; this.responseCollectorService = responseCollectorService; this.bigArrays = bigArrays; - this.rankFeatureShardPhase = rankFeatureShardPhase; this.fetchPhase = fetchPhase; this.multiBucketConsumerService = new MultiBucketConsumerService( clusterService, @@ -751,9 +748,9 @@ public void executeRankFeaturePhase(RankFeatureShardRequest request, SearchShard searchContext.rankFeatureResult().incRef(); return searchContext.rankFeatureResult(); } - rankFeatureShardPhase.prepareForFetch(searchContext, request); + RankFeatureShardPhase.prepareForFetch(searchContext, request); fetchPhase.execute(searchContext, docIds, null); - rankFeatureShardPhase.processFetch(searchContext); + RankFeatureShardPhase.processFetch(searchContext); var rankFeatureResult = searchContext.rankFeatureResult(); rankFeatureResult.incRef(); return rankFeatureResult; diff --git a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java index 68463eecfb11d..e64bbe3c39d79 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java +++ b/server/src/main/java/org/elasticsearch/search/rank/feature/RankFeatureShardPhase.java @@ -35,9 
+35,9 @@ public final class RankFeatureShardPhase { public static final RankFeatureShardResult EMPTY_RESULT = new RankFeatureShardResult(new RankFeatureDoc[0]); - public RankFeatureShardPhase() {} + private RankFeatureShardPhase() {} - public void prepareForFetch(SearchContext searchContext, RankFeatureShardRequest request) { + public static void prepareForFetch(SearchContext searchContext, RankFeatureShardRequest request) { if (logger.isTraceEnabled()) { logger.trace("{}", new SearchContextSourcePrinter(searchContext)); } @@ -58,7 +58,7 @@ public void prepareForFetch(SearchContext searchContext, RankFeatureShardRequest } } - public void processFetch(SearchContext searchContext) { + public static void processFetch(SearchContext searchContext) { if (logger.isTraceEnabled()) { logger.trace("{}", new SearchContextSourcePrinter(searchContext)); } @@ -92,7 +92,7 @@ public void processFetch(SearchContext searchContext) { } } - private RankFeaturePhaseRankShardContext shardContext(SearchContext searchContext) { + private static RankFeaturePhaseRankShardContext shardContext(SearchContext searchContext) { return searchContext.request().source() != null && searchContext.request().source().rankBuilder() != null ? searchContext.request().source().rankBuilder().buildRankFeaturePhaseShardContext() : null; diff --git a/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java index 6250d1679fda3..41febe77d54aa 100644 --- a/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/rank/RankFeatureShardPhaseTests.java @@ -219,8 +219,7 @@ public void testPrepareForFetch() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase(); - rankFeatureShardPhase.prepareForFetch(searchContext, request); + RankFeatureShardPhase.prepareForFetch(searchContext, request); assertNotNull(searchContext.fetchFieldsContext()); assertEquals(searchContext.fetchFieldsContext().fields().size(), 1); @@ -248,8 +247,7 @@ public void testPrepareForFetchNoRankFeatureContext() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase(); - rankFeatureShardPhase.prepareForFetch(searchContext, request); + RankFeatureShardPhase.prepareForFetch(searchContext, request); assertNull(searchContext.fetchFieldsContext()); assertNull(searchContext.fetchResult()); @@ -274,8 +272,7 @@ public void testPrepareForFetchWhileTaskIsCancelled() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase(); - expectThrows(TaskCancelledException.class, () -> rankFeatureShardPhase.prepareForFetch(searchContext, request)); + expectThrows(TaskCancelledException.class, () -> RankFeatureShardPhase.prepareForFetch(searchContext, request)); } } @@ -318,11 +315,10 @@ public void testProcessFetch() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new 
RankFeatureShardPhase(); // this is called as part of the search context initialization // with the ResultsType.RANK_FEATURE type searchContext.addRankFeatureResult(); - rankFeatureShardPhase.processFetch(searchContext); + RankFeatureShardPhase.processFetch(searchContext); assertNotNull(searchContext.rankFeatureResult()); assertNotNull(searchContext.rankFeatureResult().rankFeatureResult()); @@ -365,11 +361,10 @@ public void testProcessFetchEmptyHits() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase(); // this is called as part of the search context initialization // with the ResultsType.RANK_FEATURE type searchContext.addRankFeatureResult(); - rankFeatureShardPhase.processFetch(searchContext); + RankFeatureShardPhase.processFetch(searchContext); assertNotNull(searchContext.rankFeatureResult()); assertNotNull(searchContext.rankFeatureResult().rankFeatureResult()); @@ -410,11 +405,10 @@ public void testProcessFetchWhileTaskIsCancelled() { RankFeatureShardRequest request = mock(RankFeatureShardRequest.class); when(request.getDocIds()).thenReturn(new int[] { 4, 9, numDocs - 1 }); - RankFeatureShardPhase rankFeatureShardPhase = new RankFeatureShardPhase(); // this is called as part of the search context initialization // with the ResultsType.RANK_FEATURE type searchContext.addRankFeatureResult(); - expectThrows(TaskCancelledException.class, () -> rankFeatureShardPhase.processFetch(searchContext)); + expectThrows(TaskCancelledException.class, () -> RankFeatureShardPhase.processFetch(searchContext)); } finally { if (searchHits != null) { searchHits.decRef(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index cf240550e809d..ceaf7979ed60e 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -180,7 +180,6 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ClusterServiceUtils; @@ -2314,7 +2313,6 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { threadPool, scriptService, bigArrays, - new RankFeatureShardPhase(), new FetchPhase(Collections.emptyList()), responseCollectorService, new NoneCircuitBreakerService(), diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 38c7b1eb04772..7fddeb8491c7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -42,7 +42,6 @@ import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; @@ -100,7 +99,6 @@ SearchService newSearchService( ThreadPool 
threadPool, ScriptService scriptService, BigArrays bigArrays, - RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -115,7 +113,6 @@ SearchService newSearchService( threadPool, scriptService, bigArrays, - rankFeatureShardPhase, fetchPhase, responseCollectorService, circuitBreakerService, @@ -129,7 +126,6 @@ SearchService newSearchService( threadPool, scriptService, bigArrays, - rankFeatureShardPhase, fetchPhase, responseCollectorService, circuitBreakerService, diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 778a6e3106f49..179e1cd80cd4b 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -24,7 +24,6 @@ import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.search.rank.feature.RankFeatureShardPhase; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ThreadPool; @@ -83,7 +82,6 @@ public MockSearchService( ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, - RankFeatureShardPhase rankFeatureShardPhase, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, @@ -96,7 +94,6 @@ public MockSearchService( threadPool, scriptService, bigArrays, - rankFeatureShardPhase, fetchPhase, responseCollectorService, circuitBreakerService, From 2ed318f21fc015609fa9b09d94115e3465c17615 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 27 Nov 2024 12:02:36 +0100 Subject: [PATCH 279/386] Remove unnecessary ResponseCollectorService dependency from SearchService (#117573) Small cleanup from a code-review earlier. SearchService isn't using this thing, it's only used by the transport action so that's where it should reside. Adjusted constructors accordingly and removed getter. 
--- .../action/search/TransportSearchAction.java | 6 +++++- .../java/org/elasticsearch/node/NodeConstruction.java | 5 +++-- .../java/org/elasticsearch/node/NodeServiceProvider.java | 2 -- .../java/org/elasticsearch/search/SearchService.java | 9 --------- .../action/search/TransportSearchActionTests.java | 1 + .../elasticsearch/snapshots/SnapshotResiliencyTests.java | 2 +- .../src/main/java/org/elasticsearch/node/MockNode.java | 3 --- .../java/org/elasticsearch/search/MockSearchService.java | 3 --- 8 files changed, 10 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 4bca7a562fc38..5d1fb46a53cef 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -69,6 +69,7 @@ import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.node.ResponseCollectorService; import org.elasticsearch.rest.action.search.SearchResponseMetrics; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; @@ -151,6 +152,7 @@ public class TransportSearchAction extends HandledTransportAction getLocalShardsIterator( concreteIndices, routingMap, searchRequest.preference(), - searchService.getResponseCollectorService(), + responseCollectorService, searchTransportService.getPendingSearchRequests() ); final Map originalIndices = buildPerIndexOriginalIndices( diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 795fe9e2771f0..aec8eb0c3ca67 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -921,6 +921,9 @@ private void construct( final IndexingPressure indexingLimits = new IndexingPressure(settings); final IncrementalBulkService incrementalBulkService = new IncrementalBulkService(client, indexingLimits); + final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); + modules.bindToInstance(ResponseCollectorService.class, responseCollectorService); + ActionModule actionModule = new ActionModule( settings, clusterModule.getIndexNameExpressionResolver(), @@ -1003,7 +1006,6 @@ private void construct( taskManager, telemetryProvider.getTracer() ); - final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); final SearchResponseMetrics searchResponseMetrics = new SearchResponseMetrics(telemetryProvider.getMeterRegistry()); final SearchTransportService searchTransportService = new SearchTransportService( transportService, @@ -1100,7 +1102,6 @@ private void construct( scriptService, bigArrays, searchModule.getFetchPhase(), - responseCollectorService, circuitBreakerService, systemIndices.getExecutorSelector(), telemetryProvider.getTracer() diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index a49958c476416..4b7524a7ac011 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -119,7 +119,6 @@ 
SearchService newSearchService( ScriptService scriptService, BigArrays bigArrays, FetchPhase fetchPhase, - ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, ExecutorSelector executorSelector, Tracer tracer @@ -131,7 +130,6 @@ SearchService newSearchService( scriptService, bigArrays, fetchPhase, - responseCollectorService, circuitBreakerService, executorSelector, tracer diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 84bdc017ce970..e17709ed78318 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -73,7 +73,6 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; -import org.elasticsearch.node.ResponseCollectorService; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.AggregationInitializationException; @@ -279,8 +278,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final ScriptService scriptService; - private final ResponseCollectorService responseCollectorService; - private final ExecutorSelector executorSelector; private final BigArrays bigArrays; @@ -325,7 +322,6 @@ public SearchService( ScriptService scriptService, BigArrays bigArrays, FetchPhase fetchPhase, - ResponseCollectorService responseCollectorService, CircuitBreakerService circuitBreakerService, ExecutorSelector executorSelector, Tracer tracer @@ -335,7 +331,6 @@ public SearchService( this.clusterService = clusterService; this.indicesService = indicesService; this.scriptService = scriptService; - this.responseCollectorService = responseCollectorService; this.bigArrays = bigArrays; this.fetchPhase = fetchPhase; this.multiBucketConsumerService = new MultiBucketConsumerService( @@ -1535,10 +1530,6 @@ public int getOpenScrollContexts() { return openScrollContexts.get(); } - public ResponseCollectorService getResponseCollectorService() { - return this.responseCollectorService; - } - public long getDefaultKeepAliveInMillis() { return defaultKeepAlive; } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index a9de118c6b859..367508283bb93 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -1758,6 +1758,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { new NoneCircuitBreakerService(), transportService, searchService, + null, new SearchTransportService(transportService, client, null), null, clusterService, diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index ceaf7979ed60e..b7f33151961ea 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -2314,7 +2314,6 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { scriptService, bigArrays, new 
FetchPhase(Collections.emptyList()),
-            responseCollectorService,
             new NoneCircuitBreakerService(),
             EmptySystemIndices.INSTANCE.getExecutorSelector(),
             Tracer.NOOP
@@ -2481,6 +2480,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() {
                 new NoneCircuitBreakerService(),
                 transportService,
                 searchService,
+                responseCollectorService,
                 searchTransportService,
                 searchPhaseController,
                 clusterService,
diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
index 7fddeb8491c7f..d3bfacdf7691a 100644
--- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
+++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java
@@ -100,7 +99,6 @@ SearchService newSearchService(
             ScriptService scriptService,
             BigArrays bigArrays,
             FetchPhase fetchPhase,
-            ResponseCollectorService responseCollectorService,
             CircuitBreakerService circuitBreakerService,
             ExecutorSelector executorSelector,
             Tracer tracer
@@ -114,7 +113,6 @@ SearchService newSearchService(
                 scriptService,
                 bigArrays,
                 fetchPhase,
-                responseCollectorService,
                 circuitBreakerService,
                 executorSelector,
                 tracer
@@ -127,7+125,6 @@ SearchService newSearchService(
                 scriptService,
                 bigArrays,
                 fetchPhase,
-                responseCollectorService,
                 circuitBreakerService,
                 executorSelector,
                 tracer
diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
index 778a6e3106f49..79c61cacb58eb 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.node.MockNode;
-import org.elasticsearch.node.ResponseCollectorService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.fetch.FetchPhase;
@@ -83,7 +82,6 @@ public MockSearchService(
             ScriptService scriptService,
             BigArrays bigArrays,
             FetchPhase fetchPhase,
-            ResponseCollectorService responseCollectorService,
             CircuitBreakerService circuitBreakerService,
             ExecutorSelector executorSelector,
             Tracer tracer
@@ -95,7 +93,6 @@ public MockSearchService(
             scriptService,
             bigArrays,
             fetchPhase,
-            responseCollectorService,
             circuitBreakerService,
             executorSelector,
             tracer

From 560e0c5d0441a165f4588f8af869053b5202999f Mon Sep 17 00:00:00 2001
From: Bogdan Pintea
Date: Wed, 27 Nov 2024 14:59:42 +0100
Subject: [PATCH 280/386] ESQL: fix COUNT filter pushdown (#117503)

If a `COUNT` agg has a filter applied, this must also be pushed down to
source. This currently does not happen, but the issue is currently masked
by two factors:

* a logical optimisation, `ExtractAggregateCommonFilter`, that extracts the
  filter out of the STATS entirely (and pushes it to source from a `WHERE`
  instead);
* the physical plan optimisation implementing the push down,
  `PushStatsToSource`, which currently only applies if there is just one agg
  function to push down.

However, this fix still needs to be applied since:

* the defect is present in versions prior to the introduction of
  `ExtractAggregateCommonFilter`;
* the defect might resurface when the restriction in `PushStatsToSource`
  is lifted.

Fixes #115522.
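To make the expected outcome concrete: for a filtered count such as
`STATS c = COUNT(hire_date) WHERE emp_no < 10042`, the query pushed to the data
source must be the conjunction of the implicit exists query on the counted field
and the agg's own filter. A minimal sketch with the standard query builders
(org.elasticsearch.index.query.QueryBuilders) follows; this is illustrative only,
since the real combination happens via `Queries.combine` in `PushStatsToSource`
below, and the filter side is additionally wrapped in an `esql_single_value`
query, as the new unit test asserts:

    QueryBuilder exists = QueryBuilders.existsQuery("hire_date");          // implied by COUNT(hire_date)
    QueryBuilder aggFilter = QueryBuilders.rangeQuery("emp_no").lt(10042); // the per-agg WHERE filter
    QueryBuilder pushed = QueryBuilders.boolQuery().must(exists).must(aggFilter);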
--- docs/changelog/117503.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 31 +++++++++ .../physical/local/PushStatsToSource.java | 11 ++++ .../LocalPhysicalPlanOptimizerTests.java | 66 +++++++++++++++++++ .../esql/optimizer/TestPlannerOptimizer.java | 10 +-- 5 files changed, 120 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/117503.yaml diff --git a/docs/changelog/117503.yaml b/docs/changelog/117503.yaml new file mode 100644 index 0000000000000..d48741262b581 --- /dev/null +++ b/docs/changelog/117503.yaml @@ -0,0 +1,6 @@ +pr: 117503 +summary: Fix COUNT filter pushdown +area: ES|QL +type: bug +issues: + - 115522 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index f95506ff1982f..d76f4c05d955f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2688,6 +2688,16 @@ c1:long 41 ; +simpleCountOnFieldWithFilteringOnDifferentFieldAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(hire_date) where emp_no < 10042 +; + +c1:long +41 +; + simpleCountOnStarWithFilteringAndNoGrouping required_capability: per_agg_filtering from employees @@ -2698,6 +2708,27 @@ c1:long 41 ; +simpleCountWithFilteringAndNoGroupingOnFieldWithNulls +required_capability: per_agg_filtering +from employees +| stats c1 = count(birth_date) where emp_no <= 10050 +; + +c1:long +40 +; + + +simpleCountWithFilteringAndNoGroupingOnFieldWithMultivalues +required_capability: per_agg_filtering +from employees +| stats c1 = count(job_positions) where emp_no <= 10003 +; + +c1:long +3 +; + commonFilterExtractionWithAliasing required_capability: per_agg_filtering from employees diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java index b0b86b43cd162..21bc360404628 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.util.Queries; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; @@ -25,12 +26,15 @@ import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.util.ArrayList; import java.util.List; +import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushToSource; import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType.COUNT; /** @@ -98,6 +102,13 @@ private 
Tuple<List<Attribute>, List<EsStatsQueryExec.Stat>> pushableStats(
                 }
             }
         }
         if (fieldName != null) {
+            if (count.hasFilter()) {
+                if (canPushToSource(count.filter()) == false) {
+                    return null; // can't push down
+                }
+                var countFilter = PlannerUtils.TRANSLATOR_HANDLER.asQuery(count.filter());
+                query = Queries.combine(Queries.Clause.MUST, asList(countFilter.asBuilder(), query));
+            }
             return new EsStatsQueryExec.Stat(fieldName, COUNT, query);
         }
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
index 4612ccb425ba2..86f5c812737b1 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java
@@ -42,7 +42,9 @@
 import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.index.EsIndex;
 import org.elasticsearch.xpack.esql.index.IndexResolution;
+import org.elasticsearch.xpack.esql.optimizer.rules.logical.ExtractAggregateCommonFilter;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec;
@@ -59,6 +61,7 @@
 import org.elasticsearch.xpack.esql.planner.FilterTests;
 import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
 import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery;
+import org.elasticsearch.xpack.esql.rule.Rule;
 import org.elasticsearch.xpack.esql.session.Configuration;
 import org.elasticsearch.xpack.esql.stats.Metrics;
 import org.elasticsearch.xpack.esql.stats.SearchContextStats;
@@ -67,9 +70,11 @@
 import org.junit.Before;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.function.Function;

 import static java.util.Arrays.asList;
 import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL;
@@ -380,6 +385,67 @@ public void testMultiCountAllWithFilter() {
         assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
     }

+    @SuppressWarnings("unchecked")
+    public void testSingleCountWithStatsFilter() {
+        // an optimizer that filters out the ExtractAggregateCommonFilter rule
+        var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(config)) {
+            @Override
+            protected List<Batch<LogicalPlan>> batches() {
+                var oldBatches = super.batches();
+                List<Batch<LogicalPlan>> newBatches = new ArrayList<>(oldBatches.size());
+                for (var batch : oldBatches) {
+                    List<Rule<?, LogicalPlan>> rules = new ArrayList<>(List.of(batch.rules()));
+                    rules.removeIf(r -> r instanceof ExtractAggregateCommonFilter);
+                    newBatches.add(batch.with(rules.toArray(Rule[]::new)));
+                }
+                return newBatches;
+            }
+        };
+        var analyzer = makeAnalyzer("mapping-default.json");
+        var plannerOptimizer = new TestPlannerOptimizer(config, analyzer, logicalOptimizer);
+        var plan = plannerOptimizer.plan("""
+            from test
+            | stats c = count(hire_date) where emp_no < 10042
+            """, IS_SV_STATS);
+
+        var limit = as(plan, LimitExec.class);
+        var agg = as(limit.child(), AggregateExec.class);
+        assertThat(agg.getMode(), is(FINAL));
+        var exchange = as(agg.child(), ExchangeExec.class);
+        var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class);
+
+        
Function compact = s -> s.replaceAll("\\s+", ""); + assertThat(compact.apply(esStatsQuery.query().toString()), is(compact.apply(""" + { + "bool": { + "must": [ + { + "exists": { + "field": "hire_date", + "boost": 1.0 + } + }, + { + "esql_single_value": { + "field": "emp_no", + "next": { + "range": { + "emp_no": { + "lt": 10042, + "boost": 1.0 + } + } + }, + "source": "emp_no < 10042@2:36" + } + } + ], + "boost": 1.0 + } + } + """))); + } + /** * Expecting * LimitExec[1000[INTEGER]] diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java index 595f0aaa91f0d..9fe479dbb8625 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/TestPlannerOptimizer.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -23,19 +22,22 @@ public class TestPlannerOptimizer { private final Analyzer analyzer; private final LogicalPlanOptimizer logicalOptimizer; private final PhysicalPlanOptimizer physicalPlanOptimizer; - private final EsqlFunctionRegistry functionRegistry; private final Mapper mapper; private final Configuration config; public TestPlannerOptimizer(Configuration config, Analyzer analyzer) { + this(config, analyzer, new LogicalPlanOptimizer(new LogicalOptimizerContext(config))); + } + + public TestPlannerOptimizer(Configuration config, Analyzer analyzer, LogicalPlanOptimizer logicalOptimizer) { this.analyzer = analyzer; this.config = config; + this.logicalOptimizer = logicalOptimizer; parser = new EsqlParser(); - logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(config)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); - functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(); + } public PhysicalPlan plan(String query) { From 66108ebeb9c3d526a8d61df73af2191a5282dc8d Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Wed, 27 Nov 2024 16:42:41 +0200 Subject: [PATCH 281/386] Search Queries in parallel - part 2 (#117141) Assert optimization applied to search IT tests --- .../search/fields/SearchFieldsIT.java | 65 +-- .../functionscore/DecayFunctionScoreIT.java | 412 ++++++------------ .../search/functionscore/FunctionScoreIT.java | 89 ++-- .../search/nested/SimpleNestedIT.java | 135 ++---- .../search/query/QueryStringIT.java | 75 ++-- .../search/query/SearchQueryIT.java | 119 ++--- .../search/query/SimpleQueryStringIT.java | 86 ++-- .../routing/SearchReplicaSelectionIT.java | 17 +- 8 files changed, 340 insertions(+), 658 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 16e5e42e00c9f..0310af3685e3e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -65,6 +65,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -203,18 +204,16 @@ public void testStoredFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); - assertThat(response.getHits().getHits().length, equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - }); + }, + prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), + prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), + prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3") + ); assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { @@ -232,12 +231,6 @@ public void testStoredFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); - assertThat(response.getHits().getHits().length, equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); @@ -865,47 +858,7 @@ public void testDocValueFields() throws Exception { if (randomBoolean()) { builder.addDocValueField("*_field"); } - assertResponse(builder, response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); - assertThat(response.getHits().getHits().length, equalTo(1)); - Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); - assertThat( - fields, - equalTo( - newHashSet( - "byte_field", - "short_field", - "integer_field", - "long_field", - "float_field", - "double_field", - "date_field", - "boolean_field", - "text_field", - "keyword_field", - "binary_field", - "ip_field" - ) - ) - ); - - 
assertThat(response.getHits().getAt(0).getFields().get("byte_field").getValues(), equalTo(List.of(1L))); - assertThat(response.getHits().getAt(0).getFields().get("short_field").getValues(), equalTo(List.of(2L))); - assertThat(response.getHits().getAt(0).getFields().get("integer_field").getValues(), equalTo(List.of(3L))); - assertThat(response.getHits().getAt(0).getFields().get("long_field").getValues(), equalTo(List.of(4L))); - assertThat(response.getHits().getAt(0).getFields().get("float_field").getValues(), equalTo(List.of(5.0))); - assertThat(response.getHits().getAt(0).getFields().get("double_field").getValues(), equalTo(List.of(6.0d))); - assertThat( - response.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(DateFormatter.forPattern("date_optional_time").format(date)) - ); - assertThat(response.getHits().getAt(0).getFields().get("boolean_field").getValues(), equalTo(List.of(true))); - assertThat(response.getHits().getAt(0).getFields().get("text_field").getValues(), equalTo(List.of("foo"))); - assertThat(response.getHits().getAt(0).getFields().get("keyword_field").getValues(), equalTo(List.of("foo"))); - assertThat(response.getHits().getAt(0).getFields().get("binary_field").getValues(), equalTo(List.of("KmQ="))); - assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -944,7 +897,7 @@ public void testDocValueFields() throws Exception { assertThat(response.getHits().getAt(0).getFields().get("keyword_field").getValues(), equalTo(List.of("foo"))); assertThat(response.getHits().getAt(0).getFields().get("binary_field").getValues(), equalTo(List.of("KmQ="))); assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); - }); + }, builder, prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field")); assertResponse( prepareSearch().setQuery(matchAllQuery()) .addDocValueField("byte_field", "#.0") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 76384253282de..9988624f6a677 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -51,6 +51,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; @@ -135,64 +136,21 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { lonlat.add(20f); lonlat.add(11f); - assertHitCount( - client().search( - new SearchRequest(new String[] 
{}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) - ), - (numDummyDocs + 2) - ); - - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km")))) - ), - response -> { - assertHitCount(response, (numDummyDocs + 2)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - } - ); - // Test Exp - - assertHitCount( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) - ), - (numDummyDocs + 2) - ); - - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km")))) - ), - response -> { - assertHitCount(response, (numDummyDocs + 2)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - } - ); - - // Test Lin - - assertHitCount( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH).source(searchSource().query(baseQuery)) - ), - (numDummyDocs + 2) - ); - - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km")))) - ), - response -> { - assertHitCount(response, (numDummyDocs + 2)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - } + assertResponses(response -> { + assertHitCount(response, (numDummyDocs + 2)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertHitCount( + (numDummyDocs + 2), + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH).setSource(searchSource().query(baseQuery)) + ); + }, + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource(searchSource().query(functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km")))), + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource(searchSource().query(functionScoreQuery(baseQuery, linearDecayFunction("loc", lonlat, "1000km")))), + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource(searchSource().query(functionScoreQuery(baseQuery, exponentialDecayFunction("loc", lonlat, "1000km")))) ); } @@ -234,77 +192,46 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { indexRandom(true, indexBuilders); - // Test Gauss - - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( - CombineFunction.REPLACE - ) - ) - ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); - for (int i = 0; i < 
numDummyDocs; i++) { - assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); - } + assertResponses(response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); + assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); + for (int i = 0; i < numDummyDocs; i++) { + assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); } - ); - - // Test Exp - - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), exponentialDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( - CombineFunction.REPLACE - ) + }, + // Test Gauss + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( + CombineFunction.REPLACE ) - ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); - for (int i = 0; i < numDummyDocs; i++) { - assertThat(sh.getAt(i + 2).getId(), equalTo(Integer.toString(i + 3))); - } - } - ); - // Test Lin - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().size(numDummyDocs + 2) - .query( - functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0, 1.0)).boostMode( - CombineFunction.REPLACE - ) + ) + ), + // Test Exp + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), exponentialDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( + CombineFunction.REPLACE ) - ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); - assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); - assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); - } + ) + ), + // Test Lin + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().size(numDummyDocs + 2) + .query( + functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0, 1.0)).boostMode( + CombineFunction.REPLACE + ) + ) + ) ); } @@ -355,54 +282,38 @@ public void testBoostModeSettingWorks() throws Exception { ); indexRandom(true, false, indexBuilders); // force no dummy docs - // Test Gauss List lonlat = new ArrayList<>(); lonlat.add(20f); lonlat.add(11f); - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.MULTIPLY - ) + assertResponses(response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value(), 
equalTo((long) (2))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat(sh.getAt(1).getId(), equalTo("2")); + }, + // Test Gauss + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( + CombineFunction.MULTIPLY ) ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (2))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); - } - ); - // Test Exp - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source(searchSource().query(termQuery("test", "value"))) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (2))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat(sh.getAt(1).getId(), equalTo("2")); - } + ), + // Test Exp + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH).setSource(searchSource().query(termQuery("test", "value"))) ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.REPLACE - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( + CombineFunction.REPLACE ) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (2))); @@ -410,7 +321,6 @@ public void testBoostModeSettingWorks() throws Exception { assertThat(sh.getAt(1).getId(), equalTo("1")); } ); - } public void testParseGeoPoint() throws Exception { @@ -447,44 +357,30 @@ public void testParseGeoPoint() throws Exception { constantScoreQuery(termQuery("test", "value")), ScoreFunctionBuilders.weightFactorFunction(randomIntBetween(1, 10)) ); - GeoPoint point = new GeoPoint(20, 11); - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", point, "1000km")).boostMode( - CombineFunction.REPLACE - ) + + assertResponses(response -> { + SearchHits sh = response.getHits(); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); + assertThat(sh.getAt(0).getId(), equalTo("1")); + assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); + }, + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", new GeoPoint(20, 11), "1000km")).boostMode( + CombineFunction.REPLACE ) ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); - } - ); - // this is equivalent to new GeoPoint(20, 11); just flipped so scores must be same - float[] coords = { 11, 20 }; - assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, 
gaussDecayFunction("loc", coords, "1000km")).boostMode( - CombineFunction.REPLACE - ) + ), + // new float[] {11,20} is equivalent to new GeoPoint(20, 11); just flipped so scores must be same + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("loc", new float[] { 11, 20 }, "1000km")).boostMode( + CombineFunction.REPLACE ) ) - ), - response -> { - SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value(), equalTo((long) (1))); - assertThat(sh.getAt(0).getId(), equalTo("1")); - assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); - } + ) ); } @@ -516,16 +412,14 @@ public void testCombineModes() throws Exception { ); // decay score should return 0.5 for this function and baseQuery should return 2.0f as it's score assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.MULTIPLY - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.MULTIPLY ) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (1))); @@ -534,16 +428,14 @@ public void testCombineModes() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.REPLACE - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( + CombineFunction.REPLACE ) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (1))); @@ -552,16 +444,12 @@ public void testCombineModes() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.SUM - ) - ) - ) - ), + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + (searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.SUM) + )) + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (1))); @@ -576,16 +464,12 @@ public void testCombineModes() throws Exception { ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.AVG - ) - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.AVG) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), 
equalTo((long) (1))); @@ -594,16 +478,12 @@ public void testCombineModes() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.MIN - ) - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.MIN) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (1))); @@ -612,16 +492,12 @@ public void testCombineModes() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).searchType(SearchType.QUERY_THEN_FETCH) - .source( - searchSource().query( - functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode( - CombineFunction.MAX - ) - ) + prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) + .setSource( + searchSource().query( + functionScoreQuery(baseQueryBuilder, gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boostMode(CombineFunction.MAX) ) - ), + ), response -> { SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (1))); @@ -1128,7 +1004,7 @@ public void testMultiFieldOptions() throws Exception { indexRandom(true, doc1, doc2); - assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { + assertResponse(prepareSearch().setSource(searchSource().query(baseQuery)), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); assertThat(sh.getTotalHits().value(), equalTo((long) (2))); @@ -1138,11 +1014,9 @@ public void testMultiFieldOptions() throws Exception { lonlat.add(20f); lonlat.add(10f); assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)) - ) + prepareSearch().setSource( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MIN)) ) ), response -> { @@ -1154,11 +1028,9 @@ public void testMultiFieldOptions() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)) - ) + prepareSearch().setSource( + searchSource().query( + functionScoreQuery(baseQuery, gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX)) ) ), response -> { @@ -1180,11 +1052,9 @@ public void testMultiFieldOptions() throws Exception { indexRandom(true, doc1, doc2); assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)) - ) + prepareSearch().setSource( + searchSource().query( + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM)) ) ), response -> { @@ -1197,11 +1067,9 @@ public void testMultiFieldOptions() throws Exception { } ); assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - 
searchSource().query( - functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)) - ) + prepareSearch().setSource( + searchSource().query( + functionScoreQuery(baseQuery, linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG)) ) ), response -> { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index a38c9dc916056..e90740c042de3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -43,7 +43,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -137,41 +137,25 @@ public void testMinScoreFunctionScoreBasic() throws Exception { ensureYellow(); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['random_score']", Collections.emptyMap()); - assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore)) - ) - ), - response -> { - if (score < minScore) { - assertThat(response.getHits().getTotalHits().value(), is(0L)); - } else { - assertThat(response.getHits().getTotalHits().value(), is(1L)); - } - } - ); - assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery( - new MatchAllQueryBuilder(), - new FilterFunctionBuilder[] { - new FilterFunctionBuilder(scriptFunction(script)), - new FilterFunctionBuilder(scriptFunction(script)) } - ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore) - ) - ) - ), - response -> { - if (score < minScore) { - assertThat(response.getHits().getTotalHits().value(), is(0L)); - } else { - assertThat(response.getHits().getTotalHits().value(), is(1L)); - } + assertResponses(response -> { + if (score < minScore) { + assertThat(response.getHits().getTotalHits().value(), is(0L)); + } else { + assertThat(response.getHits().getTotalHits().value(), is(1L)); } + }, + prepareSearch().setSource(searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore))), + prepareSearch().setSource( + searchSource().query( + functionScoreQuery( + new MatchAllQueryBuilder(), + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(scriptFunction(script)), + new FilterFunctionBuilder(scriptFunction(script)) } + ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore) + ) + ) ); } @@ -195,31 +179,20 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept final int finalNumMatchingDocs = numMatchingDocs; - assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore)).size(numDocs) - ) - ), - 
response -> assertMinScoreSearchResponses(numDocs, response, finalNumMatchingDocs) - ); - - assertResponse( - client().search( - new SearchRequest(new String[] {}).source( - searchSource().query( - functionScoreQuery( - new MatchAllQueryBuilder(), - new FilterFunctionBuilder[] { - new FilterFunctionBuilder(scriptFunction(script)), - new FilterFunctionBuilder(scriptFunction(script)) } - ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore) - ).size(numDocs) - ) - ), - response -> assertMinScoreSearchResponses(numDocs, response, finalNumMatchingDocs) + assertResponses( + response -> assertMinScoreSearchResponses(numDocs, response, finalNumMatchingDocs), + prepareSearch().setSource(searchSource().query(functionScoreQuery(scriptFunction(script)).setMinScore(minScore)).size(numDocs)), + prepareSearch().setSource( + searchSource().query( + functionScoreQuery( + new MatchAllQueryBuilder(), + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(scriptFunction(script)), + new FilterFunctionBuilder(scriptFunction(script)) } + ).scoreMode(FunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore) + ).size(numDocs) + ) ); - } protected void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 4688201c66201..8225386ed02d2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -44,6 +44,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -1149,39 +1150,54 @@ public void testSortNestedWithNestedFilter() throws Exception { // With nested filter NestedSortBuilder nestedSort = new NestedSortBuilder("parent.child"); nestedSort.setFilter(QueryBuilders.termQuery("parent.child.filter", true)); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getHits().length, equalTo(3)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); + assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); + }, prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)), - response -> { - assertHitCount(response, 3); - assertThat(response.getHits().getHits().length, equalTo(3)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - 
assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - } - ); - // Nested path should be automatically detected, expect same results as above search request - assertResponse( prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)), - response -> { - assertHitCount(response, 3); - assertThat(response.getHits().getHits().length, equalTo(3)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - } + .addSort( + SortBuilders.fieldSort("parent.child.child_obj.value") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .order(SortOrder.ASC) + ), + // Sort mode: sum with filter + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .sortMode(SortMode.SUM) + .order(SortOrder.ASC) + ), + // Sort mode: avg with filter + prepareSearch().setQuery(matchAllQuery()) + .addSort( + SortBuilders.fieldSort("parent.child.child_values") + .setNestedSort( + new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) + ) + .sortMode(SortMode.AVG) + .order(SortOrder.ASC) + ) ); - nestedSort.setFilter(QueryBuilders.termQuery("parent.filter", false)); assertResponse( prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("parent.parent_values").setNestedSort(nestedSort).order(SortOrder.ASC)), + .addSort( + SortBuilders.fieldSort("parent.parent_values") + .setNestedSort(nestedSort.setFilter(QueryBuilders.termQuery("parent.filter", false))) + .order(SortOrder.ASC) + ), response -> { assertHitCount(response, 3); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1215,27 +1231,6 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("6")); } ); - // Check if closest nested type is resolved - assertResponse( - prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_obj.value") - .setNestedSort( - new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) - ) - .order(SortOrder.ASC) - ), - response -> { - assertHitCount(response, 3); - assertThat(response.getHits().getHits().length, equalTo(3)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - 
assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - } - ); // Sort mode: sum assertResponse( prepareSearch().setQuery(matchAllQuery()) @@ -1275,28 +1270,6 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); } ); - // Sort mode: sum with filter - assertResponse( - prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort( - new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) - ) - .sortMode(SortMode.SUM) - .order(SortOrder.ASC) - ), - response -> { - assertHitCount(response, 3); - assertThat(response.getHits().getHits().length, equalTo(3)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - } - ); // Sort mode: avg assertResponse( prepareSearch().setQuery(matchAllQuery()) @@ -1336,28 +1309,6 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("1")); } ); - // Sort mode: avg with filter - assertResponse( - prepareSearch().setQuery(matchAllQuery()) - .addSort( - SortBuilders.fieldSort("parent.child.child_values") - .setNestedSort( - new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true)) - ) - .sortMode(SortMode.AVG) - .order(SortOrder.ASC) - ), - response -> { - assertHitCount(response, 3); - assertThat(response.getHits().getHits().length, equalTo(3)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("2")); - assertThat(response.getHits().getHits()[2].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - } - ); } // Issue #9305 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index c8fe9498b156f..28d72518f516e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -50,14 +51,10 @@ public void 
testBasicAllQuery() throws Exception { reqs.add(prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo")), response -> { - assertHitCount(response, 2L); - assertHits(response.getHits(), "1", "3"); - }); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("bar")), response -> { + assertResponses(response -> { assertHitCount(response, 2L); assertHits(response.getHits(), "1", "3"); - }); + }, prepareSearch("test").setQuery(queryStringQuery("foo")), prepareSearch("test").setQuery(queryStringQuery("bar"))); assertResponse(prepareSearch("test").setQuery(queryStringQuery("Bar")), response -> { assertHitCount(response, 3L); assertHits(response.getHits(), "1", "2", "3"); @@ -70,22 +67,18 @@ public void testWithDate() throws Exception { reqs.add(prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo bar")), response -> { + assertResponses(response -> { assertHits(response.getHits(), "1", "2"); assertHitCount(response, 2L); - }); + }, + prepareSearch("test").setQuery(queryStringQuery("foo bar")), + prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")), + prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")) + ); assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), response -> { assertHits(response.getHits(), "1"); assertHitCount(response, 1L); }); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); } public void testWithLotsOfTypes() throws Exception { @@ -94,22 +87,18 @@ public void testWithLotsOfTypes() throws Exception { reqs.add(prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2")); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo bar")), response -> { + assertResponses(response -> { assertHits(response.getHits(), "1", "2"); assertHitCount(response, 2L); - }); + }, + prepareSearch("test").setQuery(queryStringQuery("foo bar")), + prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")), + prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")) + ); assertResponse(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), response -> { assertHits(response.getHits(), "1"); assertHitCount(response, 1L); }); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); } public void testDocWithAllTypes() throws Exception { @@ -118,23 +107,23 @@ public void testDocWithAllTypes() throws Exception { reqs.add(prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("foo")), response -> 
assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("Bar")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("Baz")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("19")), response -> assertHits(response.getHits(), "1")); - // nested doesn't match because it's hidden - assertResponse(prepareSearch("test").setQuery(queryStringQuery("1476383971")), response -> assertHits(response.getHits(), "1")); - // bool doesn't match - assertResponse(prepareSearch("test").setQuery(queryStringQuery("7")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("23")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("1293")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("42")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("1.7")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("1.5")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")), response -> assertHits(response.getHits(), "1")); - // binary doesn't match - // suggest doesn't match - // geo_point doesn't match + assertResponses( + response -> assertHits(response.getHits(), "1"), + prepareSearch("test").setQuery(queryStringQuery("foo")), + prepareSearch("test").setQuery(queryStringQuery("Bar")), + prepareSearch("test").setQuery(queryStringQuery("Baz")), + prepareSearch("test").setQuery(queryStringQuery("19")), + // nested doesn't match because it's hidden + prepareSearch("test").setQuery(queryStringQuery("1476383971")), + // bool doesn't match + prepareSearch("test").setQuery(queryStringQuery("7")), + prepareSearch("test").setQuery(queryStringQuery("23")), + prepareSearch("test").setQuery(queryStringQuery("1293")), + prepareSearch("test").setQuery(queryStringQuery("42")), + prepareSearch("test").setQuery(queryStringQuery("1.7")), + prepareSearch("test").setQuery(queryStringQuery("1.5")), + prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")) + ); } public void testKeywordWithWhitespace() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 118aa00fc1b4f..f790cf30e1c0e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -589,19 +589,19 @@ public void testMultiMatchQuery() throws Exception { indicesAdmin().prepareRefresh("test").get(); builder = multiMatchQuery("value1", "field1", "field2").operator(Operator.AND); // Operator only applies on terms inside a field! - // Fields are always OR-ed together. + // Fields are always OR-ed together. assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "1"); refresh(); builder = multiMatchQuery("value1", "field1").field("field3", 1.5f).operator(Operator.AND); // Operator only applies on terms inside - // a field! Fields are always OR-ed - // together. + // a field! Fields are always OR-ed + // together. 
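        // field3 carries a 1.5f boost, so the document matching on field3 is expected to rank first ("3" before "1")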
assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "3", "1"); indicesAdmin().prepareRefresh("test").get(); builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f).operator(Operator.AND); // Operator only applies on terms - // inside a field! Fields are - // always OR-ed together. + // inside a field! Fields are + // always OR-ed together. assertResponse(prepareSearch().setQuery(builder), response -> { assertHitCount(response, 2L); assertSearchHits(response, "3", "1"); @@ -726,25 +726,27 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws prepareIndex("test").setId("2").setSource("field2", "value1").get(); refresh(); - BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1")) - .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)); - assertResponse(prepareSearch().setQuery(boolQuery), response -> { + assertResponses(response -> { assertHitCount(response, 1L); assertFirstHit(response, hasId("1")); - }); - boolQuery = boolQuery().must(termQuery("field1", "value1")) + }, + prepareSearch().setQuery( + boolQuery().must(termQuery("field1", "value1")) + .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)) + ), + prepareSearch().setQuery( + boolQuery().should(termQuery("field1", "value1")) + .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)) + .minimumShouldMatch(1) + ) + ); + + BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1")) .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(1)) // Only one should clause is defined, returns no docs. 
.minimumShouldMatch(2); assertHitCount(prepareSearch().setQuery(boolQuery), 0L); - boolQuery = boolQuery().should(termQuery("field1", "value1")) - .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)) - .minimumShouldMatch(1); - assertResponse(prepareSearch().setQuery(boolQuery), response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("1")); - }); boolQuery = boolQuery().must(termQuery("field1", "value1")) .must(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)); assertHitCount(prepareSearch().setQuery(boolQuery), 0L); @@ -1449,73 +1451,40 @@ public void testRangeQueryWithTimeZone() throws Exception { .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4) ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("1")); + }, prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00")), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("1")); - } - ); - assertResponse( - prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00")), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("2")); - } - ); - assertResponse( - prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00")), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("3")); - } - ); - // We explicitly define a time zone in the from/to dates so whatever the time zone is, it won't be used - assertResponse( + // We explicitly define a time zone in the from/to dates so whatever the time zone is, it won't be used prepareSearch("test").setQuery( QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00") ), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("1")); - } - ); - assertResponse( + // We define a time zone to be applied to the filter and from/to have no time zone prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00") - ), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("2")); - } + QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00") + ) ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("2")); + }, + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00")), prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00") + QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00") ), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("3")); - } - ); - // We define a time zone to be applied to the filter and from/to have no time zone - assertResponse( prepareSearch("test").setQuery( - 
QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00") - ), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("1")); - } + QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00") + ) ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), is("3")); + }, + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00")), prepareSearch("test").setQuery( - QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00") - ), - response -> { - assertHitCount(response, 1L); - assertThat(response.getHits().getAt(0).getId(), is("2")); - } + QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00") + ) ); assertResponses(response -> { assertHitCount(response, 1L); @@ -1713,8 +1682,8 @@ public void testFieldAliasesForMetaFields() throws Exception { } /** - * Test that wildcard queries on keyword fields get normalized - */ + * Test that wildcard queries on keyword fields get normalized + */ public void testWildcardQueryNormalizationOnKeywordField() { assertAcked( prepareCreate("test").setSettings( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 522c20b687caa..f9ae30720b33f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -51,6 +51,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -383,14 +384,10 @@ public void testBasicAllQuery() throws Exception { reqs.add(prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), response -> { + assertResponses(response -> { assertHitCount(response, 2L); assertHits(response.getHits(), "1", "3"); - }); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("bar")), response -> { - assertHitCount(response, 2L); - assertHits(response.getHits(), "1", "3"); - }); + }, prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), prepareSearch("test").setQuery(simpleQueryStringQuery("bar"))); assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")), response -> { assertHitCount(response, 3L); assertHits(response.getHits(), "1", "2", "3"); @@ -407,22 +404,18 @@ public void testWithDate() throws Exception { reqs.add(prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); - 
assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), response -> { + assertResponses(response -> { assertHits(response.getHits(), "1", "2"); assertHitCount(response, 2L); - }); + }, + prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), + prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")), + prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")) + ); assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")), response -> { assertHits(response.getHits(), "1"); assertHitCount(response, 1L); }); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); } public void testWithLotsOfTypes() throws Exception { @@ -435,22 +428,18 @@ public void testWithLotsOfTypes() throws Exception { reqs.add(prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01", "f_float", "1.8", "f_ip", "127.0.0.2")); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), response -> { + assertResponses(response -> { assertHits(response.getHits(), "1", "2"); assertHitCount(response, 2L); - }); + }, + prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")), + prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")), + prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")) + ); assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")), response -> { assertHits(response.getHits(), "1"); assertHitCount(response, 1L); }); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")), response -> { - assertHits(response.getHits(), "1", "2"); - assertHitCount(response, 2L); - }); } public void testDocWithAllTypes() throws Exception { @@ -463,34 +452,27 @@ public void testDocWithAllTypes() throws Exception { reqs.add(prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("19")), response -> assertHits(response.getHits(), "1")); - // nested doesn't match because it's hidden - assertResponse( + assertResponses( + response -> assertHits(response.getHits(), "1"), + prepareSearch("test").setQuery(simpleQueryStringQuery("foo")), + prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")), + prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")), + prepareSearch("test").setQuery(simpleQueryStringQuery("19")), + // nested doesn't match because it's hidden 
prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")), - response -> assertHits(response.getHits(), "1") - ); - // bool doesn't match - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("7")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("23")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1293")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("42")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")), response -> assertHits(response.getHits(), "1")); - assertResponse(prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")), response -> assertHits(response.getHits(), "1")); - assertResponse( + // bool doesn't match + prepareSearch("test").setQuery(simpleQueryStringQuery("7")), + prepareSearch("test").setQuery(simpleQueryStringQuery("23")), + prepareSearch("test").setQuery(simpleQueryStringQuery("1293")), + prepareSearch("test").setQuery(simpleQueryStringQuery("42")), + prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")), + prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")), prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")), - response -> assertHits(response.getHits(), "1") - ); - // binary doesn't match - // suggest doesn't match - // geo_point doesn't match - // geo_shape doesn't match - - assertResponse( - prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)), - response -> assertHits(response.getHits(), "1") + // binary doesn't match + // suggest doesn't match + // geo_point doesn't match + // geo_shape doesn't match + prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 06ce330213af8..789da5aac7568 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -24,6 +24,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -50,18 +51,14 @@ public void testNodeSelection() { // Before we've gathered stats for all nodes, we should try each node once. 
Set nodeIds = new HashSet<>(); - assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); - nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); - }); - assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); - }); - assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); - nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); - }); + }, + client.prepareSearch().setQuery(matchAllQuery()), + client.prepareSearch().setQuery(matchAllQuery()), + client.prepareSearch().setQuery(matchAllQuery()) + ); assertEquals(3, nodeIds.size()); // Now after more searches, we should select a node with the lowest ARS rank. From 5c928a431671fd2789c9d58fd26a0e48cb7d6f92 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 27 Nov 2024 07:27:21 -0800 Subject: [PATCH 282/386] Emit deprecation warnings only for new index or template (#117529) Currently, we emit a deprecation warning in the parser of the source field when source mode is used in mappings. However, this behavior causes warnings to be emitted for every mapping update. In tests with assertions enabled, warnings are also triggered for every change to index metadata. As a result, deprecation warnings are inadvertently emitted for index or update requests. This change relocates the deprecation check to the mapper, limiting it to cases where a new index is created or a template is created/updated. Relates to #117524 --- .../index/mapper/MappingParser.java | 9 +++++++++ .../index/mapper/SourceFieldMapper.java | 14 +------------- .../mapper/DocumentParserContextTests.java | 1 - .../index/mapper/SourceFieldMapperTests.java | 17 +---------------- .../index/shard/ShardGetServiceTests.java | 2 -- 5 files changed, 11 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java index f30a0089e4eff..2ca14473c8385 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java @@ -10,6 +10,8 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -31,6 +33,7 @@ public final class MappingParser { private final Supplier, MetadataFieldMapper>> metadataMappersSupplier; private final Map metadataMapperParsers; private final Function documentTypeResolver; + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MappingParser.class); MappingParser( Supplier mappingParserContextSupplier, @@ -144,6 +147,12 @@ Mapping parse(@Nullable String type, MergeReason reason, Map map } @SuppressWarnings("unchecked") Map fieldNodeMap = (Map) fieldNode; + if (reason == MergeReason.INDEX_TEMPLATE + && SourceFieldMapper.NAME.equals(fieldName) + && fieldNodeMap.containsKey("mode") + && 
SourceFieldMapper.onOrAfterDeprecateModeVersion(mappingParserContext.indexVersionCreated())) { + deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); + } MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, mappingParserContext).build(); metadataMappers.put(metadataFieldMapper.getClass(), metadataFieldMapper); assert fieldNodeMap.isEmpty(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index e7c7ec3535b91..b97e04fcddb5d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -40,7 +39,6 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -310,17 +308,7 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), onOrAfterDeprecateModeVersion(c.indexVersionCreated()) == false ) - ) { - @Override - public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) - throws MapperParsingException { - assert name.equals(SourceFieldMapper.NAME) : name; - if (onOrAfterDeprecateModeVersion(parserContext.indexVersionCreated()) && node.containsKey("mode")) { - deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); - } - return super.parse(name, node, parserContext); - } - }; + ); static final class SourceFieldType extends MappedFieldType { private final boolean enabled; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index a4108caaf4fc3..be36ab9d6eac1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -133,6 +133,5 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { assertEquals(ObjectMapper.Defaults.DYNAMIC, resultFromParserContext.getDynamic()); assertEquals(MapperService.MergeReason.MAPPING_UPDATE, resultFromParserContext.getMergeReason()); assertFalse(resultFromParserContext.isInNestedContext()); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index fa173bc64518e..4d6a30849e263 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -65,7 +65,6 @@ protected void 
registerParameters(ParameterChecker checker) throws IOException { topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), dm -> { assertTrue(dm.metadataMapper(SourceFieldMapper.class).isSynthetic()); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } ); checker.registerConflictCheck("includes", b -> b.array("includes", "foo*")); @@ -74,7 +73,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException { "mode", topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), - dm -> assertWarnings(SourceFieldMapper.DEPRECATION_WARNING) + d -> {} ); } @@ -211,14 +210,12 @@ public void testSyntheticDisabledNotSupported() { ) ); assertThat(e.getMessage(), containsString("Cannot set both [mode] and [enabled] parameters")); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } public void testSyntheticUpdates() throws Exception { MapperService mapperService = createMapperService(""" { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); assertTrue(mapper.isSynthetic()); @@ -226,7 +223,6 @@ public void testSyntheticUpdates() throws Exception { merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); assertTrue(mapper.isSynthetic()); @@ -239,12 +235,10 @@ public void testSyntheticUpdates() throws Exception { """)); assertThat(e.getMessage(), containsString("Cannot update parameter [mode] from [synthetic] to [stored]")); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "disabled" } } } """); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); mapper = mapperService.documentMapper().sourceMapper(); assertFalse(mapper.enabled()); @@ -281,7 +275,6 @@ public void testSupportsNonDefaultParameterValues() throws IOException { topMapping(b -> b.startObject("_source").field("mode", randomBoolean() ? 
"synthetic" : "stored").endObject()) ).documentMapper().sourceMapper(); assertThat(sourceFieldMapper, notNullValue()); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } Exception e = expectThrows( MapperParsingException.class, @@ -313,8 +306,6 @@ public void testSupportsNonDefaultParameterValues() throws IOException { .documentMapper() .sourceMapper() ); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); - assertThat(e.getMessage(), containsString("Parameter [mode=disabled] is not allowed in source")); e = expectThrows( @@ -423,7 +414,6 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { ParsedDocument doc = docMapper.parse(source(b -> { b.field("field1", "value1"); })); assertNotNull(doc.rootDoc().getField("_recovery_source")); assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder().put(INDICES_RECOVERY_SOURCE_ENABLED_SETTING.getKey(), false).build(); @@ -434,7 +424,6 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> b.field("field1", "value1"))); assertNull(doc.rootDoc().getField("_recovery_source")); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } @@ -629,7 +618,6 @@ public void testRecoverySourceWithLogsCustom() throws IOException { ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); assertNotNull(doc.rootDoc().getField("_recovery_source")); assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}"))); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder() @@ -640,7 +628,6 @@ public void testRecoverySourceWithLogsCustom() throws IOException { DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> b.field("@timestamp", "2012-02-13"))); assertNull(doc.rootDoc().getField("_recovery_source")); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } @@ -709,7 +696,6 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) ); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } { Settings settings = Settings.builder() @@ -723,7 +709,6 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { source("123", b -> b.field("@timestamp", "2012-02-13").field("field", randomAlphaOfLength(5)), null) ); assertNull(doc.rootDoc().getField("_recovery_source")); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index 307bc26c44ba6..a49d895f38f67 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.SourceFieldMapper; import 
org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.XContentType; @@ -115,7 +114,6 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException { "mode": "synthetic" """; runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true); - assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } public void testGetFromTranslogWithDenseVector() throws IOException { From 418cbbf7b9f175ceba858a684215f42c55c9830e Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 27 Nov 2024 07:56:54 -0800 Subject: [PATCH 283/386] Remove entitlement parameter (#117597) Removes the "entitlement" parameter from policy parsing. --- .../runtime/policy/PolicyParser.java | 13 -------- .../policy/PolicyParserFailureTests.java | 30 ++++++++----------- .../runtime/policy/test-policy.yaml | 11 ++++--- 3 files changed, 18 insertions(+), 36 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 229ccec3b8b2c..ea6603af99925 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -9,7 +9,6 @@ package org.elasticsearch.entitlement.runtime.policy; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -31,8 +30,6 @@ */ public class PolicyParser { - protected static final ParseField ENTITLEMENTS_PARSEFIELD = new ParseField("entitlements"); - protected static final String entitlementPackageName = Entitlement.class.getPackage().getName(); protected final XContentParser policyParser; @@ -65,13 +62,6 @@ public Policy parsePolicy() { protected Scope parseScope(String scopeName) throws IOException { try { - if (policyParser.nextToken() != XContentParser.Token.START_OBJECT) { - throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); - } - if (policyParser.nextToken() != XContentParser.Token.FIELD_NAME - || policyParser.currentName().equals(ENTITLEMENTS_PARSEFIELD.getPreferredName()) == false) { - throw newPolicyParserException(scopeName, "expected object [" + ENTITLEMENTS_PARSEFIELD.getPreferredName() + "]"); - } if (policyParser.nextToken() != XContentParser.Token.START_ARRAY) { throw newPolicyParserException(scopeName, "expected array of "); } @@ -90,9 +80,6 @@ protected Scope parseScope(String scopeName) throws IOException { throw newPolicyParserException(scopeName, "expected closing object"); } } - if (policyParser.nextToken() != XContentParser.Token.END_OBJECT) { - throw newPolicyParserException(scopeName, "expected closing object"); - } return new Scope(scopeName, entitlements); } catch (IOException ioe) { throw new UncheckedIOException(ioe); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index b21d206f3eb6a..de8280ea87fe5 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ 
b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -29,11 +29,10 @@ public void testParserSyntaxFailures() { public void testEntitlementDoesNotExist() throws IOException { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - entitlements: - - does_not_exist: {} + - does_not_exist: {} """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); assertEquals( - "[3:7] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name]: " + "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name]: " + "unknown entitlement type [does_not_exist]", ppe.getMessage() ); @@ -42,23 +41,21 @@ public void testEntitlementDoesNotExist() throws IOException { public void testEntitlementMissingParameter() throws IOException { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - entitlements: - - file: {} + - file: {} """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); assertEquals( - "[3:14] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "[2:12] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: missing entitlement parameter [path]", ppe.getMessage() ); ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - entitlements: - - file: - path: test-path + - file: + path: test-path """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); assertEquals( - "[5:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "[4:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: missing entitlement parameter [actions]", ppe.getMessage() ); @@ -67,15 +64,14 @@ public void testEntitlementMissingParameter() throws IOException { public void testEntitlementExtraneousParameter() throws IOException { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - entitlements: - - file: - path: test-path - actions: - - read - extra: test + - file: + path: test-path + actions: + - read + extra: test """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); assertEquals( - "[8:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "[7:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", ppe.getMessage() ); diff --git a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml index b58287cfc83b7..f13f574535bec 100644 --- a/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml +++ b/libs/entitlement/src/test/resources/org/elasticsearch/entitlement/runtime/policy/test-policy.yaml @@ -1,7 +1,6 @@ entitlement-module-name: - entitlements: - - file: - path: "test/path/to/file" - actions: - - "read" - - "write" + - file: + 
path: "test/path/to/file" + actions: + - "read" + - "write" From 9022cccba7b617d6ccd0b2ec411dbd1aa6aff0c1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 27 Nov 2024 11:44:55 -0500 Subject: [PATCH 284/386] ESQL: CATEGORIZE as a BlockHash (#114317) Re-implement `CATEGORIZE` in a way that works for multi-node clusters. This requires that data is first categorized on each data node in a first pass, then the categorizers from each data node are merged on the coordinator node and previously categorized rows are re-categorized. BlockHashes, used in HashAggregations, already work in a very similar way. E.g. for queries like `... | STATS ... BY field1, field2` they map values for `field1` and `field2` to unique integer ids that are then passed to the actual aggregate functions to identify which "bucket" a row belongs to. When passed from the data nodes to the coordinator, the BlockHashes are also merged to obtain unique ids for every value in `field1, field2` that is seen on the coordinator (not only on the local data nodes). Therefore, we re-implement `CATEGORIZE` as a special BlockHash. To choose the correct BlockHash when a query plan is mapped to physical operations, the `AggregateExec` query plan node needs to know that we will be categorizing the field `message` in a query containing `... | STATS ... BY c = CATEGORIZE(message)`. For this reason, _we do not extract the expression_ `c = CATEGORIZE(message)` into an `EVAL` node, in contrast to e.g. `STATS ... BY b = BUCKET(field, 10)`. The expression `c = CATEGORIZE(message)` simply remains inside the `AggregateExec`'s groupings. **Important limitation:** For now, to use `CATEGORIZE` in a `STATS` command, there can be only 1 grouping (the `CATEGORIZE`) overall. --- docs/changelog/114317.yaml | 5 + .../kibana/definition/categorize.json | 4 +- .../esql/functions/types/categorize.asciidoc | 4 +- muted-tests.yml | 18 - .../AbstractCategorizeBlockHash.java | 105 ++++ .../aggregation/blockhash/BlockHash.java | 28 +- .../blockhash/CategorizeRawBlockHash.java | 137 +++++ .../CategorizedIntermediateBlockHash.java | 77 +++ .../operator/HashAggregationOperator.java | 9 + .../GroupingAggregatorFunctionTestCase.java | 1 + .../blockhash/BlockHashTestCase.java | 34 ++ .../aggregation/blockhash/BlockHashTests.java | 22 +- .../blockhash/CategorizeBlockHashTests.java | 406 ++++++++++++++ .../HashAggregationOperatorTests.java | 1 + .../xpack/esql/CsvTestsDataLoader.java | 2 + .../src/main/resources/categorize.csv-spec | 526 +++++++++++++++++- .../resources/mapping-mv_sample_data.json | 16 + .../src/main/resources/mv_sample_data.csv | 8 + .../grouping/CategorizeEvaluator.java | 145 ----- .../xpack/esql/action/EsqlCapabilities.java | 5 +- .../function/grouping/Categorize.java | 76 +-- .../rules/logical/CombineProjections.java | 38 +- .../optimizer/rules/logical/FoldNull.java | 2 + ...laceAggregateNestedExpressionWithEval.java | 31 +- .../physical/local/InsertFieldExtraction.java | 17 +- .../AbstractPhysicalOperationProviders.java | 42 +- .../xpack/esql/analysis/VerifierTests.java | 6 +- .../function/AbstractAggregationTestCase.java | 3 +- .../function/AbstractFunctionTestCase.java | 19 +- .../AbstractScalarFunctionTestCase.java | 1 + .../expression/function/TestCaseSupplier.java | 83 ++- .../function/grouping/CategorizeTests.java | 16 +- .../optimizer/LogicalPlanOptimizerTests.java | 61 ++ .../rules/logical/FoldNullTests.java | 13 + .../categorization/TokenListCategorizer.java | 24 + 35 files changed, 1660 insertions(+), 325 deletions(-) create mode 100644 
docs/changelog/114317.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-mv_sample_data.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_sample_data.csv delete mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeEvaluator.java diff --git a/docs/changelog/114317.yaml b/docs/changelog/114317.yaml new file mode 100644 index 0000000000000..9c73fe513e197 --- /dev/null +++ b/docs/changelog/114317.yaml @@ -0,0 +1,5 @@ +pr: 114317 +summary: "ESQL: CATEGORIZE as a `BlockHash`" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/kibana/definition/categorize.json b/docs/reference/esql/functions/kibana/definition/categorize.json index 386b178d3753f..ca3971a6e05a3 100644 --- a/docs/reference/esql/functions/kibana/definition/categorize.json +++ b/docs/reference/esql/functions/kibana/definition/categorize.json @@ -14,7 +14,7 @@ } ], "variadic" : false, - "returnType" : "integer" + "returnType" : "keyword" }, { "params" : [ @@ -26,7 +26,7 @@ } ], "variadic" : false, - "returnType" : "integer" + "returnType" : "keyword" } ], "preview" : false, diff --git a/docs/reference/esql/functions/types/categorize.asciidoc b/docs/reference/esql/functions/types/categorize.asciidoc index 4917ed313e6d7..5b64971cbc482 100644 --- a/docs/reference/esql/functions/types/categorize.asciidoc +++ b/docs/reference/esql/functions/types/categorize.asciidoc @@ -5,6 +5,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== field | result -keyword | integer -text | integer +keyword | keyword +text | keyword |=== diff --git a/muted-tests.yml b/muted-tests.yml index c97e46375c597..8b12bd2dd3365 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -67,9 +67,6 @@ tests: - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - method: test {categorize.Categorize SYNC} - issue: https://github.com/elastic/elasticsearch/issues/113722 - class: org.elasticsearch.kibana.KibanaThreadPoolIT method: testBlockedThreadPoolsRejectUserRequests issue: https://github.com/elastic/elasticsearch/issues/113939 @@ -126,12 +123,6 @@ tests: - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT method: testLookbackWithIndicesOptions issue: https://github.com/elastic/elasticsearch/issues/116127 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT - method: test {categorize.Categorize SYNC} - issue: https://github.com/elastic/elasticsearch/issues/113054 -- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT - method: test {categorize.Categorize ASYNC} - issue: 
https://github.com/elastic/elasticsearch/issues/113055 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Test start already started transform} issue: https://github.com/elastic/elasticsearch/issues/98802 @@ -153,9 +144,6 @@ tests: - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT method: testAllocationPreventedForRemoval issue: https://github.com/elastic/elasticsearch/issues/116363 -- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT - method: test {categorize.Categorize ASYNC} - issue: https://github.com/elastic/elasticsearch/issues/116373 - class: org.elasticsearch.threadpool.SimpleThreadPoolIT method: testThreadPoolMetrics issue: https://github.com/elastic/elasticsearch/issues/108320 @@ -168,9 +156,6 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsCanMatchOnCoordinatorIntegTests method: testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange issue: https://github.com/elastic/elasticsearch/issues/116523 -- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT - method: test {categorize.Categorize} - issue: https://github.com/elastic/elasticsearch/issues/116434 - class: org.elasticsearch.upgrades.SearchStatesIT method: testBWCSearchStates issue: https://github.com/elastic/elasticsearch/issues/116617 @@ -229,9 +214,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test reset running transform} issue: https://github.com/elastic/elasticsearch/issues/117473 -- class: org.elasticsearch.xpack.esql.qa.single_node.FieldExtractorIT - method: testConstantKeywordField - issue: https://github.com/elastic/elasticsearch/issues/117524 - class: org.elasticsearch.xpack.esql.qa.multi_node.FieldExtractorIT method: testConstantKeywordField issue: https://github.com/elastic/elasticsearch/issues/117524 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java new file mode 100644 index 0000000000000..22d3a10facb06 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationBytesRefHash; +import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationPartOfSpeechDictionary; +import org.elasticsearch.xpack.ml.aggs.categorization.SerializableTokenListCategory; +import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; + +import java.io.IOException; + +/** + * Base BlockHash implementation for {@code Categorize} grouping function. + */ +public abstract class AbstractCategorizeBlockHash extends BlockHash { + // TODO: this should probably also take an emitBatchSize + private final int channel; + private final boolean outputPartial; + protected final TokenListCategorizer.CloseableTokenListCategorizer categorizer; + + AbstractCategorizeBlockHash(BlockFactory blockFactory, int channel, boolean outputPartial) { + super(blockFactory); + this.channel = channel; + this.outputPartial = outputPartial; + this.categorizer = new TokenListCategorizer.CloseableTokenListCategorizer( + new CategorizationBytesRefHash(new BytesRefHash(2048, blockFactory.bigArrays())), + CategorizationPartOfSpeechDictionary.getInstance(), + 0.70f + ); + } + + protected int channel() { + return channel; + } + + @Override + public Block[] getKeys() { + return new Block[] { outputPartial ? buildIntermediateBlock() : buildFinalBlock() }; + } + + @Override + public IntVector nonEmpty() { + return IntVector.range(0, categorizer.getCategoryCount(), blockFactory); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + throw new UnsupportedOperationException(); + } + + @Override + public final ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException(); + } + + /** + * Serializes the intermediate state into a single BytesRef block, or an empty Null block if there are no categories. + */ + private Block buildIntermediateBlock() { + if (categorizer.getCategoryCount() == 0) { + return blockFactory.newConstantNullBlock(0); + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + // TODO be more careful here. + out.writeVInt(categorizer.getCategoryCount()); + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + category.writeTo(out); + } + // We're returning a block with N positions just because the Page must have all blocks with the same position count! 
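+            // This is the wire format that CategorizedIntermediateBlockHash#readIntermediate consumes on the
+            // coordinating node: a vint category count followed by each SerializableTokenListCategory, in id order.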
+ return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), categorizer.getCategoryCount()); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private Block buildFinalBlock() { + try (BytesRefVector.Builder result = blockFactory.newBytesRefVectorBuilder(categorizer.getCategoryCount())) { + BytesRefBuilder scratch = new BytesRefBuilder(); + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + scratch.copyChars(category.getRegex()); + result.appendBytesRef(scratch.get()); + scratch.clear(); + } + return result.build().asBlock(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index 919cb92f79260..ef0f3ceb112c4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.Int3Hash; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; @@ -58,9 +59,7 @@ * leave a big gap, even if we never see {@code null}. *
 * </p>
    */ -public abstract sealed class BlockHash implements Releasable, SeenGroupIds // - permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash, BytesRef2BlockHash, BytesRef3BlockHash, // - NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash, TimeSeriesBlockHash { +public abstract class BlockHash implements Releasable, SeenGroupIds { protected final BlockFactory blockFactory; @@ -107,7 +106,15 @@ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // @Override public abstract BitArray seenGroupIds(BigArrays bigArrays); - public record GroupSpec(int channel, ElementType elementType) {} + /** + * @param isCategorize Whether this group is a CATEGORIZE() or not. + * May be changed in the future when more stateful grouping functions are added. + */ + public record GroupSpec(int channel, ElementType elementType, boolean isCategorize) { + public GroupSpec(int channel, ElementType elementType) { + this(channel, elementType, false); + } + } /** * Creates a specialized hash table that maps one or more {@link Block}s to ids. @@ -159,6 +166,19 @@ public static BlockHash buildPackedValuesBlockHash(List groups, Block return new PackedValuesBlockHash(groups, blockFactory, emitBatchSize); } + /** + * Builds a BlockHash for the Categorize grouping function. + */ + public static BlockHash buildCategorizeBlockHash(List groups, AggregatorMode aggregatorMode, BlockFactory blockFactory) { + if (groups.size() != 1) { + throw new IllegalArgumentException("only a single CATEGORIZE group can used"); + } + + return aggregatorMode.isInputPartial() + ? new CategorizedIntermediateBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial()) + : new CategorizeRawBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial()); + } + /** * Creates a specialized hash table that maps a {@link Block} of the given input element type to ids. */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java new file mode 100644 index 0000000000000..bf633e0454384 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; + +/** + * BlockHash implementation for {@code Categorize} grouping function. + *
+ * <p>
    + * This implementation expects rows, and can't deserialize intermediate states coming from other nodes. + *
+ * </p>
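+ * <p>
+ *     Each added page is tokenized with a {@link CategorizationAnalyzer}, and every row is mapped to a category
+ *     id; those ids are what the grouping aggregators receive as bucket keys. The serialized categorizer state
+ *     is emitted through {@code getKeys()} so that a {@link CategorizedIntermediateBlockHash} can merge it later.
+ * </p>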
    + */ +public class CategorizeRawBlockHash extends AbstractCategorizeBlockHash { + private final CategorizeEvaluator evaluator; + + CategorizeRawBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial) { + super(blockFactory, channel, outputPartial); + CategorizationAnalyzer analyzer = new CategorizationAnalyzer( + // TODO: should be the same analyzer as used in Production + new CustomAnalyzer( + TokenizerFactory.newFactory("whitespace", WhitespaceTokenizer::new), + new CharFilterFactory[0], + new TokenFilterFactory[0] + ), + true + ); + this.evaluator = new CategorizeEvaluator(analyzer, categorizer, blockFactory); + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + try (IntBlock result = (IntBlock) evaluator.eval(page.getBlock(channel()))) { + addInput.add(0, result); + } + } + + @Override + public void close() { + evaluator.close(); + } + + /** + * Similar implementation to an Evaluator. + */ + public static final class CategorizeEvaluator implements Releasable { + private final CategorizationAnalyzer analyzer; + + private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; + + private final BlockFactory blockFactory; + + public CategorizeEvaluator( + CategorizationAnalyzer analyzer, + TokenListCategorizer.CloseableTokenListCategorizer categorizer, + BlockFactory blockFactory + ) { + this.analyzer = analyzer; + this.categorizer = categorizer; + this.blockFactory = blockFactory; + } + + public Block eval(BytesRefBlock vBlock) { + BytesRefVector vVector = vBlock.asVector(); + if (vVector == null) { + return eval(vBlock.getPositionCount(), vBlock); + } + IntVector vector = eval(vBlock.getPositionCount(), vVector); + return vector.asBlock(); + } + + public IntBlock eval(int positionCount, BytesRefBlock vBlock) { + try (IntBlock.Builder result = blockFactory.newIntBlockBuilder(positionCount)) { + BytesRef vScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue; + } + int first = vBlock.getFirstValueIndex(p); + int count = vBlock.getValueCount(p); + if (count == 1) { + result.appendInt(process(vBlock.getBytesRef(first, vScratch))); + continue; + } + int end = first + count; + result.beginPositionEntry(); + for (int i = first; i < end; i++) { + result.appendInt(process(vBlock.getBytesRef(i, vScratch))); + } + result.endPositionEntry(); + } + return result.build(); + } + } + + public IntVector eval(int positionCount, BytesRefVector vVector) { + try (IntVector.FixedBuilder result = blockFactory.newIntVectorFixedBuilder(positionCount)) { + BytesRef vScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + result.appendInt(p, process(vVector.getBytesRef(p, vScratch))); + } + return result.build(); + } + } + + private int process(BytesRef v) { + return categorizer.computeCategory(v.utf8ToString(), analyzer).getId(); + } + + @Override + public void close() { + Releasables.closeExpectNoException(analyzer, categorizer); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java new file mode 100644 index 0000000000000..1bca34a70e5fa --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java @@ -0,0 +1,77 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.xpack.ml.aggs.categorization.SerializableTokenListCategory; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** + * BlockHash implementation for {@code Categorize} grouping function. + *
+ * <p>
    + * This implementation expects a single intermediate state in a block, as generated by {@link AbstractCategorizeBlockHash}. + *
+ * </p>
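+ * <p>
+ *     On {@code add}, the serialized state is merged into this hash's own categorizer, and the old category ids
+ *     (running from 0 to {@code size - 1}) are remapped to the merged ids before being passed to the aggregators.
+ * </p>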
    + */ +public class CategorizedIntermediateBlockHash extends AbstractCategorizeBlockHash { + + CategorizedIntermediateBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial) { + super(blockFactory, channel, outputPartial); + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + if (page.getPositionCount() == 0) { + // No categories + return; + } + BytesRefBlock categorizerState = page.getBlock(channel()); + Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); + try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { + for (int i = 0; i < idMap.size(); i++) { + newIdsBuilder.appendInt(idMap.get(i)); + } + try (IntBlock newIds = newIdsBuilder.build()) { + addInput.add(0, newIds); + } + } + } + + /** + * Read intermediate state from a block. + * + * @return a map from the old category id to the new one. The old ids go from 0 to {@code size - 1}. + */ + private Map readIntermediate(BytesRef bytes) { + Map idMap = new HashMap<>(); + try (StreamInput in = new BytesArray(bytes).streamInput()) { + int count = in.readVInt(); + for (int oldCategoryId = 0; oldCategoryId < count; oldCategoryId++) { + int newCategoryId = categorizer.mergeWireCategory(new SerializableTokenListCategory(in)).getId(); + idMap.put(oldCategoryId, newCategoryId); + } + return idMap; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public void close() { + categorizer.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 03a4ca2b0ad5e..a69e8ca767014 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.Describable; +import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; @@ -39,11 +40,19 @@ public class HashAggregationOperator implements Operator { public record HashAggregationOperatorFactory( List groups, + AggregatorMode aggregatorMode, List aggregators, int maxPageSize ) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { + if (groups.stream().anyMatch(BlockHash.GroupSpec::isCategorize)) { + return new HashAggregationOperator( + aggregators, + () -> BlockHash.buildCategorizeBlockHash(groups, aggregatorMode, driverContext.blockFactory()), + driverContext + ); + } return new HashAggregationOperator( aggregators, () -> BlockHash.build(groups, driverContext.blockFactory(), maxPageSize, false), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index cb190dfffafb9..1e97bdf5a2e79 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -105,6 +105,7 @@ private Operator.OperatorFactory simpleWithMode( } return new HashAggregationOperator.HashAggregationOperatorFactory( List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), + mode, List.of(supplier.groupingAggregatorFactory(mode)), randomPageSize() ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java new file mode 100644 index 0000000000000..fa93c0aa1c375 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public abstract class BlockHashTestCase extends ESTestCase { + + final CircuitBreaker breaker = newLimitedBreaker(ByteSizeValue.ofGb(1)); + final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + final MockBlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); + + // A breaker service that always returns the given breaker for getBreaker(CircuitBreaker.REQUEST) + private static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { + CircuitBreakerService breakerService = mock(CircuitBreakerService.class); + when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); + return breakerService; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 088e791348840..ede2d68ca2367 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -11,11 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; @@ -26,7 +22,6 @@ import org.elasticsearch.compute.data.IntBlock; import 
org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; @@ -34,8 +29,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.test.ESTestCase; import org.junit.After; import java.util.ArrayList; @@ -54,14 +47,8 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -public class BlockHashTests extends ESTestCase { - - final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - final MockBlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); +public class BlockHashTests extends BlockHashTestCase { @ParametersFactory public static List params() { @@ -1534,13 +1521,6 @@ private void assertKeys(Block[] actualKeys, Object[][] expectedKeys) { } } - // A breaker service that always returns the given breaker for getBreaker(CircuitBreaker.REQUEST) - static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { - CircuitBreakerService breakerService = mock(CircuitBreakerService.class); - when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); - return breakerService; - } - IntVector intRange(int startInclusive, int endExclusive) { return IntVector.range(startInclusive, endExclusive, TestBlockFactory.getNonBreakingInstance()); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java new file mode 100644 index 0000000000000..de8a2a44266fe --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -0,0 +1,406 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.CannedSourceOperator; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.HashAggregationOperator; +import org.elasticsearch.compute.operator.LocalSourceOperator; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.core.Releasables; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.compute.operator.OperatorTestCase.runDriver; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class CategorizeBlockHashTests extends BlockHashTestCase { + + public void testCategorizeRaw() { + final Page page; + final int positions = 7; + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions)) { + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.1")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Disconnected")); + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.2")); + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.3")); + page = new Page(builder.build()); + } + + try (BlockHash hash = new CategorizeRawBlockHash(0, blockFactory, true)) { + hash.add(page, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + assertEquals(groupIds.getPositionCount(), positions); + + assertEquals(0, groupIds.getInt(0)); + assertEquals(1, groupIds.getInt(1)); + assertEquals(1, groupIds.getInt(2)); + assertEquals(1, groupIds.getInt(3)); + assertEquals(2, groupIds.getInt(4)); + assertEquals(0, groupIds.getInt(5)); + assertEquals(0, groupIds.getInt(6)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + add(positionOffset, groupIds.asBlock()); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + } finally { + page.releaseBlocks(); + } + + // TODO: 
randomize and try multiple pages. + // TODO: assert the state of the BlockHash after adding pages. Including the categorizer state. + // TODO: also test the lookup method and other stuff. + } + + public void testCategorizeIntermediate() { + Page page1; + int positions1 = 7; + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions1)) { + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.1")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.2")); + builder.appendBytesRef(new BytesRef("Connection error")); + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.3")); + builder.appendBytesRef(new BytesRef("Connected to 10.1.0.4")); + page1 = new Page(builder.build()); + } + Page page2; + int positions2 = 5; + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions2)) { + builder.appendBytesRef(new BytesRef("Disconnected")); + builder.appendBytesRef(new BytesRef("Connected to 10.2.0.1")); + builder.appendBytesRef(new BytesRef("Disconnected")); + builder.appendBytesRef(new BytesRef("Connected to 10.3.0.2")); + builder.appendBytesRef(new BytesRef("System shutdown")); + page2 = new Page(builder.build()); + } + + Page intermediatePage1, intermediatePage2; + + // Fill intermediatePages with the intermediate state from the raw hashes + try ( + BlockHash rawHash1 = new CategorizeRawBlockHash(0, blockFactory, true); + BlockHash rawHash2 = new CategorizeRawBlockHash(0, blockFactory, true) + ) { + rawHash1.add(page1, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + assertEquals(groupIds.getPositionCount(), positions1); + assertEquals(0, groupIds.getInt(0)); + assertEquals(1, groupIds.getInt(1)); + assertEquals(1, groupIds.getInt(2)); + assertEquals(0, groupIds.getInt(3)); + assertEquals(1, groupIds.getInt(4)); + assertEquals(0, groupIds.getInt(5)); + assertEquals(0, groupIds.getInt(6)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + add(positionOffset, groupIds.asBlock()); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + intermediatePage1 = new Page(rawHash1.getKeys()[0]); + + rawHash2.add(page2, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + assertEquals(groupIds.getPositionCount(), positions2); + assertEquals(0, groupIds.getInt(0)); + assertEquals(1, groupIds.getInt(1)); + assertEquals(0, groupIds.getInt(2)); + assertEquals(1, groupIds.getInt(3)); + assertEquals(2, groupIds.getInt(4)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + add(positionOffset, groupIds.asBlock()); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + intermediatePage2 = new Page(rawHash2.getKeys()[0]); + } finally { + page1.releaseBlocks(); + page2.releaseBlocks(); + } + + try (BlockHash intermediateHash = new CategorizedIntermediateBlockHash(0, blockFactory, true)) { + intermediateHash.add(intermediatePage1, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + Set values = IntStream.range(0, groupIds.getPositionCount()) + .map(groupIds::getInt) + .boxed() + .collect(Collectors.toSet()); + assertEquals(values, Set.of(0, 1)); + } + + @Override + public void add(int positionOffset, 
IntVector groupIds) { + add(positionOffset, groupIds.asBlock()); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + + intermediateHash.add(intermediatePage2, new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + Set values = IntStream.range(0, groupIds.getPositionCount()) + .map(groupIds::getInt) + .boxed() + .collect(Collectors.toSet()); + // The category IDs {0, 1, 2} should map to groups {0, 2, 3}, because + // 0 matches an existing category (Connected to ...), and the others are new. + assertEquals(values, Set.of(0, 2, 3)); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + add(positionOffset, groupIds.asBlock()); + } + + @Override + public void close() { + fail("hashes should not close AddInput"); + } + }); + } finally { + intermediatePage1.releaseBlocks(); + intermediatePage2.releaseBlocks(); + } + } + + public void testCategorize_withDriver() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + DriverContext driverContext = new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + + LocalSourceOperator.BlockSupplier input1 = () -> { + try ( + BytesRefVector.Builder textsBuilder = driverContext.blockFactory().newBytesRefVectorBuilder(10); + LongVector.Builder countsBuilder = driverContext.blockFactory().newLongVectorBuilder(10) + ) { + textsBuilder.appendBytesRef(new BytesRef("a")); + textsBuilder.appendBytesRef(new BytesRef("b")); + textsBuilder.appendBytesRef(new BytesRef("words words words goodbye jan")); + textsBuilder.appendBytesRef(new BytesRef("words words words goodbye nik")); + textsBuilder.appendBytesRef(new BytesRef("words words words goodbye tom")); + textsBuilder.appendBytesRef(new BytesRef("words words words hello jan")); + textsBuilder.appendBytesRef(new BytesRef("c")); + textsBuilder.appendBytesRef(new BytesRef("d")); + countsBuilder.appendLong(1); + countsBuilder.appendLong(2); + countsBuilder.appendLong(800); + countsBuilder.appendLong(80); + countsBuilder.appendLong(8000); + countsBuilder.appendLong(900); + countsBuilder.appendLong(30); + countsBuilder.appendLong(4); + return new Block[] { textsBuilder.build().asBlock(), countsBuilder.build().asBlock() }; + } + }; + LocalSourceOperator.BlockSupplier input2 = () -> { + try ( + BytesRefVector.Builder textsBuilder = driverContext.blockFactory().newBytesRefVectorBuilder(10); + LongVector.Builder countsBuilder = driverContext.blockFactory().newLongVectorBuilder(10) + ) { + textsBuilder.appendBytesRef(new BytesRef("words words words hello nik")); + textsBuilder.appendBytesRef(new BytesRef("words words words hello nik")); + textsBuilder.appendBytesRef(new BytesRef("c")); + textsBuilder.appendBytesRef(new BytesRef("words words words goodbye chris")); + textsBuilder.appendBytesRef(new BytesRef("d")); + textsBuilder.appendBytesRef(new BytesRef("e")); + countsBuilder.appendLong(9); + countsBuilder.appendLong(90); + countsBuilder.appendLong(3); + countsBuilder.appendLong(8); + countsBuilder.appendLong(40); + countsBuilder.appendLong(5); + return new Block[] { textsBuilder.build().asBlock(), countsBuilder.build().asBlock() }; + } + }; + + List intermediateOutput = new ArrayList<>(); + + Driver driver = new Driver( + driverContext, + new LocalSourceOperator(input1), + List.of( + new 
HashAggregationOperator.HashAggregationOperatorFactory( + List.of(makeGroupSpec()), + AggregatorMode.INITIAL, + List.of( + new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), + new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + 16 * 1024 + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + driver = new Driver( + driverContext, + new LocalSourceOperator(input2), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + List.of(makeGroupSpec()), + AggregatorMode.INITIAL, + List.of( + new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), + new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + 16 * 1024 + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + List finalOutput = new ArrayList<>(); + + driver = new Driver( + driverContext, + new CannedSourceOperator(intermediateOutput.iterator()), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + List.of(makeGroupSpec()), + AggregatorMode.FINAL, + List.of( + new SumLongAggregatorFunctionSupplier(List.of(1, 2)).groupingAggregatorFactory(AggregatorMode.FINAL), + new MaxLongAggregatorFunctionSupplier(List.of(3, 4)).groupingAggregatorFactory(AggregatorMode.FINAL) + ), + 16 * 1024 + ).get(driverContext) + ), + new PageConsumerOperator(finalOutput::add), + () -> {} + ); + runDriver(driver); + + assertThat(finalOutput, hasSize(1)); + assertThat(finalOutput.get(0).getBlockCount(), equalTo(3)); + BytesRefBlock outputTexts = finalOutput.get(0).getBlock(0); + LongBlock outputSums = finalOutput.get(0).getBlock(1); + LongBlock outputMaxs = finalOutput.get(0).getBlock(2); + assertThat(outputSums.getPositionCount(), equalTo(outputTexts.getPositionCount())); + assertThat(outputMaxs.getPositionCount(), equalTo(outputTexts.getPositionCount())); + Map sums = new HashMap<>(); + Map maxs = new HashMap<>(); + for (int i = 0; i < outputTexts.getPositionCount(); i++) { + sums.put(outputTexts.getBytesRef(i, new BytesRef()).utf8ToString(), outputSums.getLong(i)); + maxs.put(outputTexts.getBytesRef(i, new BytesRef()).utf8ToString(), outputMaxs.getLong(i)); + } + assertThat( + sums, + equalTo( + Map.of( + ".*?a.*?", + 1L, + ".*?b.*?", + 2L, + ".*?c.*?", + 33L, + ".*?d.*?", + 44L, + ".*?e.*?", + 5L, + ".*?words.+?words.+?words.+?goodbye.*?", + 8888L, + ".*?words.+?words.+?words.+?hello.*?", + 999L + ) + ) + ); + assertThat( + maxs, + equalTo( + Map.of( + ".*?a.*?", + 1L, + ".*?b.*?", + 2L, + ".*?c.*?", + 30L, + ".*?d.*?", + 40L, + ".*?e.*?", + 5L, + ".*?words.+?words.+?words.+?goodbye.*?", + 8000L, + ".*?words.+?words.+?words.+?hello.*?", + 900L + ) + ) + ); + Releasables.close(() -> Iterators.map(finalOutput.iterator(), (Page p) -> p::releaseBlocks)); + } + + private BlockHash.GroupSpec makeGroupSpec() { + return new BlockHash.GroupSpec(0, ElementType.BYTES_REF, true); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index f2fa94c1feb08..b2f4ad594936e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -54,6 +54,7 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) { return new HashAggregationOperator.HashAggregationOperatorFactory( List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), + mode, List.of( new SumLongAggregatorFunctionSupplier(sumChannels).groupingAggregatorFactory(mode), new MaxLongAggregatorFunctionSupplier(maxChannels).groupingAggregatorFactory(mode) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index ffbac2829ea4a..9c987a02aca2d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -61,6 +61,7 @@ public class CsvTestsDataLoader { private static final TestsDataset ALERTS = new TestsDataset("alerts"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs"); private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data"); + private static final TestsDataset MV_SAMPLE_DATA = new TestsDataset("mv_sample_data"); private static final TestsDataset SAMPLE_DATA_STR = SAMPLE_DATA.withIndex("sample_data_str") .withTypeMapping(Map.of("client_ip", "keyword")); private static final TestsDataset SAMPLE_DATA_TS_LONG = SAMPLE_DATA.withIndex("sample_data_ts_long") @@ -104,6 +105,7 @@ public class CsvTestsDataLoader { Map.entry(LANGUAGES_LOOKUP.indexName, LANGUAGES_LOOKUP), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), + Map.entry(MV_SAMPLE_DATA.indexName, MV_SAMPLE_DATA), Map.entry(ALERTS.indexName, ALERTS), Map.entry(SAMPLE_DATA_STR.indexName, SAMPLE_DATA_STR), Map.entry(SAMPLE_DATA_TS_LONG.indexName, SAMPLE_DATA_TS_LONG), diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index 8e0fcd78f0322..89d9026423204 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -1,14 +1,524 @@ -categorize -required_capability: categorize +standard aggs +required_capability: categorize_v2 FROM sample_data - | SORT message ASC - | STATS count=COUNT(), values=MV_SORT(VALUES(message)) BY category=CATEGORIZE(message) + | STATS count=COUNT(), + sum=SUM(event_duration), + avg=AVG(event_duration), + count_distinct=COUNT_DISTINCT(event_duration) + BY category=CATEGORIZE(message) + | SORT count DESC, category +; + +count:long | sum:long | avg:double | count_distinct:long | category:keyword + 3 | 7971589 | 2657196.3333333335 | 3 | .*?Connected.+?to.*? + 3 | 14027356 | 4675785.333333333 | 3 | .*?Connection.+?error.*? + 1 | 1232382 | 1232382.0 | 1 | .*?Disconnected.*? +; + +values aggs +required_capability: categorize_v2 + +FROM sample_data + | STATS values=MV_SORT(VALUES(message)), + top=TOP(event_duration, 2, "DESC") + BY category=CATEGORIZE(message) + | SORT category +; + +values:keyword | top:long | category:keyword +[Connected to 10.1.0.1, Connected to 10.1.0.2, Connected to 10.1.0.3] | [3450233, 2764889] | .*?Connected.+?to.*? +[Connection error] | [8268153, 5033755] | .*?Connection.+?error.*? +[Disconnected] | 1232382 | .*?Disconnected.*? 
+; + +mv +required_capability: categorize_v2 + +FROM mv_sample_data + | STATS COUNT(), SUM(event_duration) BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | SUM(event_duration):long | category:keyword + 7 | 23231327 | .*?Banana.*? + 3 | 7971589 | .*?Connected.+?to.*? + 3 | 14027356 | .*?Connection.+?error.*? + 1 | 1232382 | .*?Disconnected.*? +; + +row mv +required_capability: categorize_v2 + +ROW message = ["connected to a", "connected to b", "disconnected"], str = ["a", "b", "c"] + | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | VALUES(str):keyword | category:keyword + 2 | [a, b, c] | .*?connected.+?to.*? + 1 | [a, b, c] | .*?disconnected.*? +; + +with multiple indices +required_capability: categorize_v2 +required_capability: union_types + +FROM sample_data* + | STATS COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | category:keyword + 12 | .*?Connected.+?to.*? + 12 | .*?Connection.+?error.*? + 4 | .*?Disconnected.*? +; + +mv with many values +required_capability: categorize_v2 + +FROM employees + | STATS COUNT() BY category=CATEGORIZE(job_positions) + | SORT category + | LIMIT 5 +; + +COUNT():long | category:keyword + 18 | .*?Accountant.*? + 13 | .*?Architect.*? + 11 | .*?Business.+?Analyst.*? + 13 | .*?Data.+?Scientist.*? + 10 | .*?Head.+?Human.+?Resources.*? +; + +# Throws when calling AbstractCategorizeBlockHash.seenGroupIds() - Requires nulls support? +mv with many values-Ignore +required_capability: categorize_v2 + +FROM employees + | STATS SUM(languages) BY category=CATEGORIZE(job_positions) + | SORT category DESC + | LIMIT 3 +; + +SUM(languages):integer | category:keyword + 43 | .*?Accountant.*? + 46 | .*?Architect.*? + 35 | .*?Business.+?Analyst.*? +; + +mv via eval +required_capability: categorize_v2 + +FROM sample_data + | EVAL message = MV_APPEND(message, "Banana") + | STATS COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | category:keyword + 7 | .*?Banana.*? + 3 | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? +; + +mv via eval const +required_capability: categorize_v2 + +FROM sample_data + | EVAL message = ["Banana", "Bread"] + | STATS COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | category:keyword + 7 | .*?Banana.*? + 7 | .*?Bread.*? +; + +mv via eval const without aliases +required_capability: categorize_v2 + +FROM sample_data + | EVAL message = ["Banana", "Bread"] + | STATS COUNT() BY CATEGORIZE(message) + | SORT `CATEGORIZE(message)` +; + +COUNT():long | CATEGORIZE(message):keyword + 7 | .*?Banana.*? + 7 | .*?Bread.*? +; + +mv const in parameter +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) + | SORT c +; + +COUNT():long | c:keyword + 7 | .*?Banana.*? + 7 | .*?Bread.*? +; + +agg alias shadowing +required_capability: categorize_v2 + +FROM sample_data + | STATS c = COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) + | SORT c +; + +warning:Line 2:9: Field 'c' shadowed by field at line 2:24 + +c:keyword +.*?Banana.*? +.*?Bread.*? +; + +chained aggregations using categorize +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(message) + | STATS COUNT() BY category=CATEGORIZE(category) + | SORT category +; + +COUNT():long | category:keyword + 1 | .*?\.\*\?Connected\.\+\?to\.\*\?.*? + 1 | .*?\.\*\?Connection\.\+\?error\.\*\?.*? + 1 | .*?\.\*\?Disconnected\.\*\?.*? 
+; + +stats without aggs +required_capability: categorize_v2 + +FROM sample_data + | STATS BY category=CATEGORIZE(message) + | SORT category +; + +category:keyword +.*?Connected.+?to.*? +.*?Connection.+?error.*? +.*?Disconnected.*? +; + +text field +required_capability: categorize_v2 + +FROM hosts + | STATS COUNT() BY category=CATEGORIZE(host_group) + | SORT category +; + +COUNT():long | category:keyword + 2 | .*?DB.+?servers.*? + 2 | .*?Gateway.+?instances.*? + 5 | .*?Kubernetes.+?cluster.*? +; + +on TO_UPPER +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(TO_UPPER(message)) + | SORT category +; + +COUNT():long | category:keyword + 3 | .*?CONNECTED.+?TO.*? + 3 | .*?CONNECTION.+?ERROR.*? + 1 | .*?DISCONNECTED.*? +; + +on CONCAT +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " banana")) + | SORT category +; + +COUNT():long | category:keyword + 3 | .*?Connected.+?to.+?banana.*? + 3 | .*?Connection.+?error.+?banana.*? + 1 | .*?Disconnected.+?banana.*? +; + +on CONCAT with unicode +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " 👍🏽😊")) + | SORT category +; + +COUNT():long | category:keyword + 3 | .*?Connected.+?to.+?👍🏽😊.*? + 3 | .*?Connection.+?error.+?👍🏽😊.*? + 1 | .*?Disconnected.+?👍🏽😊.*? +; + +on REVERSE(CONCAT()) +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(REVERSE(CONCAT(message, " 👍🏽😊"))) + | SORT category +; + +COUNT():long | category:keyword + 1 | .*?😊👍🏽.+?detcennocsiD.*? + 3 | .*?😊👍🏽.+?ot.+?detcennoC.*? + 3 | .*?😊👍🏽.+?rorre.+?noitcennoC.*? +; + +and then TO_LOWER +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(message) + | EVAL category=TO_LOWER(category) + | SORT category +; + +COUNT():long | category:keyword + 3 | .*?connected.+?to.*? + 3 | .*?connection.+?error.*? + 1 | .*?disconnected.*? +; + +# Throws NPE - Requires nulls support +on const empty string-Ignore +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE("") + | SORT category +; + +COUNT():long | category:keyword + 7 | .*?.*? +; + +# Throws NPE - Requires nulls support +on const empty string from eval-Ignore +required_capability: categorize_v2 + +FROM sample_data + | EVAL x = "" + | STATS COUNT() BY category=CATEGORIZE(x) + | SORT category +; + +COUNT():long | category:keyword + 7 | .*?.*? 
+; + +# Doesn't give the correct results - Requires nulls support +on null-Ignore +required_capability: categorize_v2 + +FROM sample_data + | EVAL x = null + | STATS COUNT() BY category=CATEGORIZE(x) + | SORT category +; + +COUNT():long | category:keyword + 7 | null +; + +# Doesn't give the correct results - Requires nulls support +on null string-Ignore +required_capability: categorize_v2 + +FROM sample_data + | EVAL x = null::string + | STATS COUNT() BY category=CATEGORIZE(x) + | SORT category +; + +COUNT():long | category:keyword + 7 | null +; + +filtering out all data +required_capability: categorize_v2 + +FROM sample_data + | WHERE @timestamp < "2023-10-23T00:00:00Z" + | STATS COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | category:keyword +; + +filtering out all data with constant +required_capability: categorize_v2 + +FROM sample_data + | STATS COUNT() BY category=CATEGORIZE(message) + | WHERE false +; + +COUNT():long | category:keyword +; + +drop output columns +required_capability: categorize_v2 + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message) + | EVAL x=1 + | DROP count, category +; + +x:integer +1 +1 +1 +; + +category value processing +required_capability: categorize_v2 + +ROW message = ["connected to a", "connected to b", "disconnected"] + | STATS COUNT() BY category=CATEGORIZE(message) + | EVAL category = TO_UPPER(category) | SORT category ; -count:long | values:keyword | category:integer -3 | [Connected to 10.1.0.1, Connected to 10.1.0.2, Connected to 10.1.0.3] | 0 -3 | [Connection error] | 1 -1 | [Disconnected] | 2 +COUNT():long | category:keyword + 2 | .*?CONNECTED.+?TO.*? + 1 | .*?DISCONNECTED.*? +; + +row aliases +required_capability: categorize_v2 + +ROW message = "connected to a" + | EVAL x = message + | STATS COUNT() BY category=CATEGORIZE(x) + | EVAL y = category + | SORT y +; + +COUNT():long | category:keyword | y:keyword + 1 | .*?connected.+?to.+?a.*? | .*?connected.+?to.+?a.*? +; + +from aliases +required_capability: categorize_v2 + +FROM sample_data + | EVAL x = message + | STATS COUNT() BY category=CATEGORIZE(x) + | EVAL y = category + | SORT y +; + +COUNT():long | category:keyword | y:keyword + 3 | .*?Connected.+?to.*? | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? | .*?Disconnected.*? +; + +row aliases with keep +required_capability: categorize_v2 + +ROW message = "connected to a" + | EVAL x = message + | KEEP x + | STATS COUNT() BY category=CATEGORIZE(x) + | EVAL y = category + | KEEP `COUNT()`, y + | SORT y +; + +COUNT():long | y:keyword + 1 | .*?connected.+?to.+?a.*? +; + +from aliases with keep +required_capability: categorize_v2 + +FROM sample_data + | EVAL x = message + | KEEP x + | STATS COUNT() BY category=CATEGORIZE(x) + | EVAL y = category + | KEEP `COUNT()`, y + | SORT y +; + +COUNT():long | y:keyword + 3 | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? +; + +row rename +required_capability: categorize_v2 + +ROW message = "connected to a" + | RENAME message as x + | STATS COUNT() BY category=CATEGORIZE(x) + | RENAME category as y + | SORT y +; + +COUNT():long | y:keyword + 1 | .*?connected.+?to.+?a.*? +; + +from rename +required_capability: categorize_v2 + +FROM sample_data + | RENAME message as x + | STATS COUNT() BY category=CATEGORIZE(x) + | RENAME category as y + | SORT y +; + +COUNT():long | y:keyword + 3 | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? 
+; + +row drop +required_capability: categorize_v2 + +ROW message = "connected to a" + | STATS c = COUNT() BY category=CATEGORIZE(message) + | DROP category + | SORT c +; + +c:long +1 +; + +from drop +required_capability: categorize_v2 + +FROM sample_data + | STATS c = COUNT() BY category=CATEGORIZE(message) + | DROP category + | SORT c +; + +c:long +1 +3 +3 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-mv_sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-mv_sample_data.json new file mode 100644 index 0000000000000..838a8ba09b45a --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-mv_sample_data.json @@ -0,0 +1,16 @@ +{ + "properties": { + "@timestamp": { + "type": "date" + }, + "client_ip": { + "type": "ip" + }, + "event_duration": { + "type": "long" + }, + "message": { + "type": "keyword" + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_sample_data.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_sample_data.csv new file mode 100644 index 0000000000000..c02a4a7a5845f --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_sample_data.csv @@ -0,0 +1,8 @@ +@timestamp:date ,client_ip:ip,event_duration:long,message:keyword +2023-10-23T13:55:01.543Z,172.21.3.15 ,1756467,[Connected to 10.1.0.1, Banana] +2023-10-23T13:53:55.832Z,172.21.3.15 ,5033755,[Connection error, Banana] +2023-10-23T13:52:55.015Z,172.21.3.15 ,8268153,[Connection error, Banana] +2023-10-23T13:51:54.732Z,172.21.3.15 , 725448,[Connection error, Banana] +2023-10-23T13:33:34.937Z,172.21.0.5 ,1232382,[Disconnected, Banana] +2023-10-23T12:27:28.948Z,172.21.2.113,2764889,[Connected to 10.1.0.2, Banana] +2023-10-23T12:15:03.360Z,172.21.2.162,3450233,[Connected to 10.1.0.3, Banana] diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeEvaluator.java deleted file mode 100644 index c6349907f9b4b..0000000000000 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeEvaluator.java +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.grouping; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import java.util.function.Function; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.compute.operator.Warnings; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Categorize}. - * This class is generated. Do not edit it. - */ -public final class CategorizeEvaluator implements EvalOperator.ExpressionEvaluator { - private final Source source; - - private final EvalOperator.ExpressionEvaluator v; - - private final CategorizationAnalyzer analyzer; - - private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; - - private final DriverContext driverContext; - - private Warnings warnings; - - public CategorizeEvaluator(Source source, EvalOperator.ExpressionEvaluator v, - CategorizationAnalyzer analyzer, - TokenListCategorizer.CloseableTokenListCategorizer categorizer, DriverContext driverContext) { - this.source = source; - this.v = v; - this.analyzer = analyzer; - this.categorizer = categorizer; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (BytesRefBlock vBlock = (BytesRefBlock) v.eval(page)) { - BytesRefVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(page.getPositionCount(), vBlock); - } - return eval(page.getPositionCount(), vVector).asBlock(); - } - } - - public IntBlock eval(int positionCount, BytesRefBlock vBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef vScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (vBlock.getValueCount(p) != 1) { - if (vBlock.getValueCount(p) > 1) { - warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - result.appendInt(Categorize.process(vBlock.getBytesRef(vBlock.getFirstValueIndex(p), vScratch), this.analyzer, this.categorizer)); - } - return result.build(); - } - } - - public IntVector eval(int positionCount, BytesRefVector vVector) { - try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { - BytesRef vScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - result.appendInt(p, Categorize.process(vVector.getBytesRef(p, vScratch), this.analyzer, this.categorizer)); - } - return result.build(); - } - } - - @Override - public String toString() { - return "CategorizeEvaluator[" + "v=" + v + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(v, analyzer, categorizer); - } - - private Warnings warnings() { - if 
(warnings == null) { - this.warnings = Warnings.createWarnings( - driverContext.warningsMode(), - source.source().getLineNumber(), - source.source().getColumnNumber(), - source.text() - ); - } - return warnings; - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory v; - - private final Function analyzer; - - private final Function categorizer; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v, - Function analyzer, - Function categorizer) { - this.source = source; - this.v = v; - this.analyzer = analyzer; - this.categorizer = categorizer; - } - - @Override - public CategorizeEvaluator get(DriverContext context) { - return new CategorizeEvaluator(source, v.get(context), analyzer.apply(context), categorizer.apply(context), context); - } - - @Override - public String toString() { - return "CategorizeEvaluator[" + "v=" + v + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 3eaeceaa86564..58748781d1778 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -402,8 +402,11 @@ public enum Cap { /** * Supported the text categorization function "CATEGORIZE". + *
+ * <p>
+ * This capability was initially named `CATEGORIZE`, and got renamed after the function started correctly returning keywords.
+ * </p>
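+ * <p>
+ * A minimal sketch of how callers gate on the renamed capability (this mirrors the VerifierTests changes later
+ * in this patch; {@code assumeTrue} is the JUnit assumption helper those tests already use):
+ * </p>
+ * <pre>{@code
+ * assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled());
+ * }</pre>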
    */ - CATEGORIZE(Build.current().isSnapshot()), + CATEGORIZE_V2(Build.current().isSnapshot()), /** * QSTR function diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 75a9883a77102..31b603ecef889 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -7,20 +7,10 @@ package org.elasticsearch.xpack.esql.expression.function.grouping; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.core.WhitespaceTokenizer; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.BytesRefHash; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -29,10 +19,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationBytesRefHash; -import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationPartOfSpeechDictionary; -import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import java.io.IOException; import java.util.List; @@ -42,16 +28,16 @@ /** * Categorizes text messages. - * - * This implementation is incomplete and comes with the following caveats: - * - it only works correctly on a single node. - * - when running on multiple nodes, category IDs of the different nodes are - * aggregated, even though the same ID can correspond to a totally different - * category - * - the output consists of category IDs, which should be replaced by category - * regexes or keys - * - * TODO(jan, nik): fix this + *
+ * <p>
+ * This function has no evaluators, as it works like an aggregation (Accumulates values, stores intermediate states, etc).
+ * </p>
+ * <p>
+ * For the implementation, see:
+ * </p>
+ * <ul>
+ *     <li>{@link org.elasticsearch.compute.aggregation.blockhash.CategorizedIntermediateBlockHash}</li>
+ *     <li>{@link org.elasticsearch.compute.aggregation.blockhash.CategorizeRawBlockHash}</li>
+ * </ul>
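+ * <p>
+ * A rough sketch of the two-phase flow these hashes implement, using the same calls as the new
+ * CategorizeBlockHashTests earlier in this patch (illustrative only: the real wiring is done by the planner,
+ * and {@code page}, {@code blockFactory} and {@code addInput} (a {@code GroupingAggregatorFunction.AddInput})
+ * are assumed to be in scope):
+ * </p>
+ * <pre>{@code
+ * // data node: categorize the raw messages and emit the intermediate categorizer state
+ * try (BlockHash raw = new CategorizeRawBlockHash(0, blockFactory, true)) {
+ *     raw.add(page, addInput);                        // assigns node-local category ids
+ *     Page intermediate = new Page(raw.getKeys()[0]); // serialized categorizer state
+ *     // coordinator: merge the intermediate states, remapping local ids to global ones
+ *     try (BlockHash merged = new CategorizedIntermediateBlockHash(0, blockFactory, true)) {
+ *         merged.add(intermediate, addInput);
+ *     }
+ * }
+ * }</pre>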
    */ public class Categorize extends GroupingFunction implements Validatable { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -62,7 +48,7 @@ public class Categorize extends GroupingFunction implements Validatable { private final Expression field; - @FunctionInfo(returnType = { "integer" }, description = "Categorizes text messages.") + @FunctionInfo(returnType = "keyword", description = "Categorizes text messages.") public Categorize( Source source, @Param(name = "field", type = { "text", "keyword" }, description = "Expression to categorize") Expression field @@ -88,43 +74,13 @@ public String getWriteableName() { @Override public boolean foldable() { - return field.foldable(); - } - - @Evaluator - static int process( - BytesRef v, - @Fixed(includeInToString = false, build = true) CategorizationAnalyzer analyzer, - @Fixed(includeInToString = false, build = true) TokenListCategorizer.CloseableTokenListCategorizer categorizer - ) { - String s = v.utf8ToString(); - try (TokenStream ts = analyzer.tokenStream("text", s)) { - return categorizer.computeCategory(ts, s.length(), 1).getId(); - } catch (IOException e) { - throw new RuntimeException(e); - } + // Categorize cannot be currently folded + return false; } @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - return new CategorizeEvaluator.Factory( - source(), - toEvaluator.apply(field), - context -> new CategorizationAnalyzer( - // TODO(jan): get the correct analyzer in here, see CategorizationAnalyzerConfig::buildStandardCategorizationAnalyzer - new CustomAnalyzer( - TokenizerFactory.newFactory("whitespace", WhitespaceTokenizer::new), - new CharFilterFactory[0], - new TokenFilterFactory[0] - ), - true - ), - context -> new TokenListCategorizer.CloseableTokenListCategorizer( - new CategorizationBytesRefHash(new BytesRefHash(2048, context.bigArrays())), - CategorizationPartOfSpeechDictionary.getInstance(), - 0.70f - ) - ); + throw new UnsupportedOperationException("CATEGORIZE is only evaluated during aggregations"); } @Override @@ -134,11 +90,11 @@ protected TypeResolution resolveType() { @Override public DataType dataType() { - return DataType.INTEGER; + return DataType.KEYWORD; } @Override - public Expression replaceChildren(List newChildren) { + public Categorize replaceChildren(List newChildren) { return new Categorize(source(), newChildren.get(0)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java index 1c256012baeb0..be7096538fb9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; @@ -61,12 +62,15 @@ protected LogicalPlan rule(UnaryPlan plan) { if (plan instanceof Aggregate a) { if (child instanceof Project p) { var 
groupings = a.groupings(); - List groupingAttrs = new ArrayList<>(a.groupings().size()); + List groupingAttrs = new ArrayList<>(a.groupings().size()); for (Expression grouping : groupings) { if (grouping instanceof Attribute attribute) { groupingAttrs.add(attribute); + } else if (grouping instanceof Alias as && as.child() instanceof Categorize) { + groupingAttrs.add(as); } else { - // After applying ReplaceAggregateNestedExpressionWithEval, groupings can only contain attributes. + // After applying ReplaceAggregateNestedExpressionWithEval, + // groupings (except Categorize) can only contain attributes. throw new EsqlIllegalArgumentException("Expected an Attribute, got {}", grouping); } } @@ -137,23 +141,33 @@ private static List combineProjections(List combineUpperGroupingsAndLowerProjections( - List upperGroupings, + List upperGroupings, List lowerProjections ) { // Collect the alias map for resolving the source (f1 = 1, f2 = f1, etc..) - AttributeMap aliases = new AttributeMap<>(); + AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lowerProjections) { - // Projections are just aliases for attributes, so casting is safe. - aliases.put(ne.toAttribute(), (Attribute) Alias.unwrap(ne)); + // record the alias + aliases.put(ne.toAttribute(), Alias.unwrap(ne)); } - // Replace any matching attribute directly with the aliased attribute from the projection. - AttributeSet replaced = new AttributeSet(); - for (Attribute attr : upperGroupings) { - // All substitutions happen before; groupings must be attributes at this point. - replaced.add(aliases.resolve(attr, attr)); + AttributeSet seen = new AttributeSet(); + List replaced = new ArrayList<>(); + for (NamedExpression ne : upperGroupings) { + // Duplicated attributes are ignored. + if (ne instanceof Attribute attribute) { + var newExpression = aliases.resolve(attribute, attribute); + if (newExpression instanceof Attribute newAttribute && seen.add(newAttribute) == false) { + // Already seen, skip + continue; + } + replaced.add(newExpression); + } else { + // For grouping functions, this will replace nested properties too + replaced.add(ne.transformUp(Attribute.class, a -> aliases.resolve(a, a))); + } } - return new ArrayList<>(replaced); + return replaced; } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java index 0f08cd66444a3..638fa1b8db456 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNull.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; public class FoldNull extends OptimizerRules.OptimizerExpressionRule { @@ -42,6 +43,7 @@ public Expression rule(Expression e) { } } else if (e instanceof Alias == false && e.nullable() == Nullability.TRUE + && e instanceof Categorize == false && Expressions.anyMatch(e.children(), Expressions::isNull)) { return Literal.of(e, null); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java index 173940af19935..985e68252a1f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -46,15 +47,29 @@ protected LogicalPlan rule(Aggregate aggregate) { // start with the groupings since the aggs might duplicate it for (int i = 0, s = newGroupings.size(); i < s; i++) { Expression g = newGroupings.get(i); - // move the alias into an eval and replace it with its attribute + // Move the alias into an eval and replace it with its attribute. + // Exception: Categorize is internal to the aggregation and remains in the groupings. We move its child expression into an eval. if (g instanceof Alias as) { - groupingChanged = true; - var attr = as.toAttribute(); - evals.add(as); - evalNames.put(as.name(), attr); - newGroupings.set(i, attr); - if (as.child() instanceof GroupingFunction gf) { - groupingAttributes.put(gf, attr); + if (as.child() instanceof Categorize cat) { + if (cat.field() instanceof Attribute == false) { + groupingChanged = true; + var fieldAs = new Alias(as.source(), as.name(), cat.field(), null, true); + var fieldAttr = fieldAs.toAttribute(); + evals.add(fieldAs); + evalNames.put(fieldAs.name(), fieldAttr); + Categorize replacement = cat.replaceChildren(List.of(fieldAttr)); + newGroupings.set(i, as.replaceChild(replacement)); + groupingAttributes.put(cat, fieldAttr); + } + } else { + groupingChanged = true; + var attr = as.toAttribute(); + evals.add(as); + evalNames.put(as.name(), attr); + newGroupings.set(i, attr); + if (as.child() instanceof GroupingFunction gf) { + groupingAttributes.put(gf, attr); + } } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index ea9cd76bcb9bc..72573821dfeb8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.optimizer.rules.physical.ProjectAwayColumns; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import 
org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -58,11 +59,17 @@ public PhysicalPlan apply(PhysicalPlan plan) { * make sure the fields are loaded for the standard hash aggregator. */ if (p instanceof AggregateExec agg && agg.groupings().size() == 1) { - var leaves = new LinkedList<>(); - // TODO: this seems out of place - agg.aggregates().stream().filter(a -> agg.groupings().contains(a) == false).forEach(a -> leaves.addAll(a.collectLeaves())); - var remove = agg.groupings().stream().filter(g -> leaves.contains(g) == false).toList(); - missing.removeAll(Expressions.references(remove)); + // CATEGORIZE requires the standard hash aggregator as well. + if (agg.groupings().get(0).anyMatch(e -> e instanceof Categorize) == false) { + var leaves = new LinkedList<>(); + // TODO: this seems out of place + agg.aggregates() + .stream() + .filter(a -> agg.groupings().contains(a) == false) + .forEach(a -> leaves.addAll(a.collectLeaves())); + var remove = agg.groupings().stream().filter(g -> leaves.contains(g) == false).toList(); + missing.removeAll(Expressions.references(remove)); + } } // add extractor diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 94a9246a56f83..a7418654f6b0e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; @@ -52,6 +53,7 @@ public final PhysicalOperation groupingPhysicalOperation( PhysicalOperation source, LocalExecutionPlannerContext context ) { + // The layout this operation will produce. Layout.Builder layout = new Layout.Builder(); Operator.OperatorFactory operatorFactory = null; AggregatorMode aggregatorMode = aggregateExec.getMode(); @@ -95,12 +97,17 @@ public final PhysicalOperation groupingPhysicalOperation( List aggregatorFactories = new ArrayList<>(); List groupSpecs = new ArrayList<>(aggregateExec.groupings().size()); for (Expression group : aggregateExec.groupings()) { - var groupAttribute = Expressions.attribute(group); - if (groupAttribute == null) { + Attribute groupAttribute = Expressions.attribute(group); + // In case of `... BY groupAttribute = CATEGORIZE(sourceGroupAttribute)` the actual source attribute is different. + Attribute sourceGroupAttribute = (aggregatorMode.isInputPartial() == false + && group instanceof Alias as + && as.child() instanceof Categorize categorize) ? 
Expressions.attribute(categorize.field()) : groupAttribute; + if (sourceGroupAttribute == null) { throw new EsqlIllegalArgumentException("Unexpected non-named expression[{}] as grouping in [{}]", group, aggregateExec); } - Layout.ChannelSet groupAttributeLayout = new Layout.ChannelSet(new HashSet<>(), groupAttribute.dataType()); - groupAttributeLayout.nameIds().add(groupAttribute.id()); + Layout.ChannelSet groupAttributeLayout = new Layout.ChannelSet(new HashSet<>(), sourceGroupAttribute.dataType()); + groupAttributeLayout.nameIds() + .add(group instanceof Alias as && as.child() instanceof Categorize ? groupAttribute.id() : sourceGroupAttribute.id()); /* * Check for aliasing in aggregates which occurs in two cases (due to combining project + stats): @@ -119,7 +126,7 @@ public final PhysicalOperation groupingPhysicalOperation( // check if there's any alias used in grouping - no need for the final reduction since the intermediate data // is in the output form // if the group points to an alias declared in the aggregate, use the alias child as source - else if (aggregatorMode == AggregatorMode.INITIAL || aggregatorMode == AggregatorMode.INTERMEDIATE) { + else if (aggregatorMode.isOutputPartial()) { if (groupAttribute.semanticEquals(a.toAttribute())) { groupAttribute = attr; break; @@ -129,8 +136,8 @@ else if (aggregatorMode == AggregatorMode.INITIAL || aggregatorMode == Aggregato } } layout.append(groupAttributeLayout); - Layout.ChannelAndType groupInput = source.layout.get(groupAttribute.id()); - groupSpecs.add(new GroupSpec(groupInput == null ? null : groupInput.channel(), groupAttribute)); + Layout.ChannelAndType groupInput = source.layout.get(sourceGroupAttribute.id()); + groupSpecs.add(new GroupSpec(groupInput == null ? null : groupInput.channel(), sourceGroupAttribute, group)); } if (aggregatorMode == AggregatorMode.FINAL) { @@ -164,6 +171,7 @@ else if (aggregatorMode == AggregatorMode.INITIAL || aggregatorMode == Aggregato } else { operatorFactory = new HashAggregationOperatorFactory( groupSpecs.stream().map(GroupSpec::toHashGroupSpec).toList(), + aggregatorMode, aggregatorFactories, context.pageSize(aggregateExec.estimatedRowSize()) ); @@ -178,10 +186,14 @@ else if (aggregatorMode == AggregatorMode.INITIAL || aggregatorMode == Aggregato /*** * Creates a standard layout for intermediate aggregations, typically used across exchanges. * Puts the group first, followed by each aggregation. - * - * It's similar to the code above (groupingPhysicalOperation) but ignores the factory creation. + *
+ * <p>
+ * It's similar to the code above (groupingPhysicalOperation) but ignores the factory creation.
+ * </p>
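+ * <p>
+ * For example, for {@code STATS SUM(event_duration), MAX(event_duration) BY cat = CATEGORIZE(message)} the
+ * intermediate layout is, roughly (channel numbers taken from the FINAL-mode suppliers in the new driver test
+ * earlier in this patch):
+ * </p>
+ * <pre>{@code
+ * channel 0:    cat               // the group, always first
+ * channels 1-2: SUM intermediate  // consumed by SumLongAggregatorFunctionSupplier(List.of(1, 2))
+ * channels 3-4: MAX intermediate  // consumed by MaxLongAggregatorFunctionSupplier(List.of(3, 4))
+ * }</pre>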
    */ public static List intermediateAttributes(List aggregates, List groupings) { + // TODO: This should take CATEGORIZE into account: + // it currently works because the CATEGORIZE intermediate state is just 1 block with the same type as the function return, + // so the attribute generated here is the expected one var aggregateMapper = new AggregateMapper(); List attrs = new ArrayList<>(); @@ -304,12 +316,20 @@ private static AggregatorFunctionSupplier supplier(AggregateFunction aggregateFu throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } - private record GroupSpec(Integer channel, Attribute attribute) { + /** + * The input configuration of this group. + * + * @param channel The source channel of this group + * @param attribute The attribute, source of this group + * @param expression The expression being used to group + */ + private record GroupSpec(Integer channel, Attribute attribute, Expression expression) { BlockHash.GroupSpec toHashGroupSpec() { if (channel == null) { throw new EsqlIllegalArgumentException("planned to use ordinals but tried to use the hash instead"); } - return new BlockHash.GroupSpec(channel, elementType()); + + return new BlockHash.GroupSpec(channel, elementType(), Alias.unwrap(expression) instanceof Categorize); } ElementType elementType() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index f25b19c4e5d1c..355073fcc873f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1821,7 +1821,7 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1850,7 +1850,7 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); @@ -1865,7 +1865,7 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index db5d8e03458ea..df1675ba22568 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -111,7 +111,8 @@ protected static List 
@@ -111,7 +111,8 @@ protected static List<TestCaseSupplier> withNoRowsExpectingNull(List<TestCaseSupplier> anyNullIsNull(
                 oc.getExpectedTypeError(),
                 null,
                 null,
-                null
+                null,
+                oc.canBuildEvaluator()
             );
         }));
 
@@ -260,7 +261,8 @@ protected static List<TestCaseSupplier> anyNullIsNull(
                 oc.getExpectedTypeError(),
                 null,
                 null,
-                null
+                null,
+                oc.canBuildEvaluator()
             );
         }));
     }
 
@@ -648,18 +650,7 @@ protected static List<TestCaseSupplier> randomizeBytesRefsOffset(List<TestCaseSupplier> data, String expectedTypeError)
             Class<? extends Exception> foldingExceptionClass,
             String foldingExceptionMessage,
             Object extra
+        ) {
+            this(
+                data,
+                evaluatorToString,
+                expectedType,
+                matcher,
+                expectedWarnings,
+                expectedBuildEvaluatorWarnings,
+                expectedTypeError,
+                foldingExceptionClass,
+                foldingExceptionMessage,
+                extra,
+                data.stream().allMatch(d -> d.forceLiteral || DataType.isRepresentable(d.type))
+            );
+        }
+
+        TestCase(
+            List<TypedData> data,
+            Matcher<String> evaluatorToString,
+            DataType expectedType,
+            Matcher<?> matcher,
+            String[] expectedWarnings,
+            String[] expectedBuildEvaluatorWarnings,
+            String expectedTypeError,
+            Class<? extends Exception> foldingExceptionClass,
+            String foldingExceptionMessage,
+            Object extra,
+            boolean canBuildEvaluator
         ) {
             this.source = Source.EMPTY;
             this.data = data;
@@ -1442,10 +1470,10 @@ public static TestCase typeError(List<TypedData> data, String expectedTypeError)
             this.expectedWarnings = expectedWarnings;
             this.expectedBuildEvaluatorWarnings = expectedBuildEvaluatorWarnings;
             this.expectedTypeError = expectedTypeError;
-            this.canBuildEvaluator = data.stream().allMatch(d -> d.forceLiteral || DataType.isRepresentable(d.type));
             this.foldingExceptionClass = foldingExceptionClass;
             this.foldingExceptionMessage = foldingExceptionMessage;
             this.extra = extra;
+            this.canBuildEvaluator = canBuildEvaluator;
         }
 
         public Source getSource() {
@@ -1520,6 +1548,25 @@ public Object extra() {
             return extra;
         }
 
+        /**
+         * Build a new {@link TestCase} with new {@link #data}.
+         */
+        public TestCase withData(List<TypedData> data) {
+            return new TestCase(
+                data,
+                evaluatorToString,
+                expectedType,
+                matcher,
+                expectedWarnings,
+                expectedBuildEvaluatorWarnings,
+                expectedTypeError,
+                foldingExceptionClass,
+                foldingExceptionMessage,
+                extra,
+                canBuildEvaluator
+            );
+        }
+
         /**
          * Build a new {@link TestCase} with new {@link #extra()}.
         */
@@ -1534,7 +1581,8 @@ public TestCase withExtra(Object extra) {
                 expectedTypeError,
                 foldingExceptionClass,
                 foldingExceptionMessage,
-                extra
+                extra,
+                canBuildEvaluator
             );
         }
 
@@ -1549,7 +1597,8 @@ public TestCase withWarning(String warning) {
                 expectedTypeError,
                 foldingExceptionClass,
                 foldingExceptionMessage,
-                extra
+                extra,
+                canBuildEvaluator
             );
         }
 
@@ -1568,7 +1617,8 @@ public TestCase withBuildEvaluatorWarning(String warning) {
                 expectedTypeError,
                 foldingExceptionClass,
                 foldingExceptionMessage,
-                extra
+                extra,
+                canBuildEvaluator
             );
         }
 
@@ -1592,7 +1642,30 @@ public TestCase withFoldingException(Class<? extends Exception> clazz, String me
                 expectedTypeError,
                 clazz,
                 message,
-                extra
+                extra,
+                canBuildEvaluator
             );
         }
+
+        /**
+         * Build a new {@link TestCase} that can't build an evaluator.
+         * <p>
+         * Useful for special cases that can't be executed, but should still be considered.
+         * </p>
+         */
+        public TestCase withoutEvaluator() {
+            return new TestCase(
+                data,
+                evaluatorToString,
+                expectedType,
+                matcher,
+                expectedWarnings,
+                expectedBuildEvaluatorWarnings,
+                expectedTypeError,
+                foldingExceptionClass,
+                foldingExceptionMessage,
+                extra,
+                false
+            );
+        }
 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeTests.java
index f93389d5cb659..d29ac635e4bb7 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/CategorizeTests.java
@@ -23,6 +23,12 @@
 
 import static org.hamcrest.Matchers.equalTo;
 
+/**
+ * Dummy test implementation for Categorize. Used just to generate documentation.
+ * <p>
+ * Most test cases are currently skipped as this function can't build an evaluator.
+ * </p>
+ */
 public class CategorizeTests extends AbstractScalarFunctionTestCase {
     public CategorizeTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
         this.testCase = testCaseSupplier.get();
     }
@@ -37,11 +43,11 @@ public static Iterable<Object[]> parameters() {
                     "text with " + dataType.typeName(),
                     List.of(dataType),
                     () -> new TestCaseSupplier.TestCase(
-                        List.of(new TestCaseSupplier.TypedData(new BytesRef("blah blah blah"), dataType, "f")),
-                        "CategorizeEvaluator[v=Attribute[channel=0]]",
-                        DataType.INTEGER,
-                        equalTo(0)
-                    )
+                        List.of(new TestCaseSupplier.TypedData(new BytesRef(""), dataType, "field")),
+                        "",
+                        DataType.KEYWORD,
+                        equalTo(new BytesRef(""))
+                    ).withoutEvaluator()
                 )
             );
         }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
index a11a9cef82989..2b4fb6ad68972 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
@@ -57,6 +57,7 @@
 import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Values;
 import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket;
+import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong;
@@ -1203,6 +1204,33 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg
         assertThat(Expressions.names(agg.groupings()), contains("first_name"));
     }
 
+    /**
+     * Expects
+     * Limit[1000[INTEGER]]
+     * \_Aggregate[STANDARD,[CATEGORIZE(first_name{f}#18) AS cat],[SUM(salary{f}#22,true[BOOLEAN]) AS s, cat{r}#10]]
+     *   \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..]
+     */
+    public void testCombineProjectionWithCategorizeGrouping() {
+        var plan = plan("""
+            from test
+            | eval k = first_name, k1 = k
+            | stats s = sum(salary) by cat = CATEGORIZE(k)
+            | keep s, cat
+            """);
+
+        var limit = as(plan, Limit.class);
+        var agg = as(limit.child(), Aggregate.class);
+        assertThat(agg.child(), instanceOf(EsRelation.class));
+
+        assertThat(Expressions.names(agg.aggregates()), contains("s", "cat"));
+        assertThat(Expressions.names(agg.groupings()), contains("cat"));
+
+        var categorizeAlias = as(agg.groupings().get(0), Alias.class);
+        var categorize = as(categorizeAlias.child(), Categorize.class);
+        var categorizeField = as(categorize.field(), FieldAttribute.class);
+        assertThat(categorizeField.name(), is("first_name"));
+    }
+
     /**
      * Expects
      * Limit[1000[INTEGER]]
@@ -3909,6 +3937,39 @@ public void testNestedExpressionsInGroups() {
         assertThat(eval.fields().get(0).name(), is("emp_no % 2"));
     }
 
+    /**
+     * Expects
+     * Limit[1000[INTEGER]]
+     * \_Aggregate[STANDARD,[CATEGORIZE(CATEGORIZE(CONCAT(first_name, "abc")){r$}#18) AS CATEGORIZE(CONCAT(first_name, "abc"))],[CO
+     * UNT(salary{f}#13,true[BOOLEAN]) AS c, CATEGORIZE(CONCAT(first_name, "abc")){r}#3]]
+     *   \_Eval[[CONCAT(first_name{f}#9,[61 62 63][KEYWORD]) AS CATEGORIZE(CONCAT(first_name, "abc"))]]
+     *     \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..]
+     */
+    public void testNestedExpressionsInGroupsWithCategorize() {
+        var plan = optimizedPlan("""
+            from test
+            | stats c = count(salary) by CATEGORIZE(CONCAT(first_name, "abc"))
+            """);
+
+        var limit = as(plan, Limit.class);
+        var agg = as(limit.child(), Aggregate.class);
+        var groupings = agg.groupings();
+        var categorizeAlias = as(groupings.get(0), Alias.class);
+        var categorize = as(categorizeAlias.child(), Categorize.class);
+        var aggs = agg.aggregates();
+        assertThat(aggs.get(1), is(categorizeAlias.toAttribute()));
+
+        var eval = as(agg.child(), Eval.class);
+        assertThat(eval.fields(), hasSize(1));
+        var evalFieldAlias = as(eval.fields().get(0), Alias.class);
+        var evalField = as(evalFieldAlias.child(), Concat.class);
+
+        assertThat(evalFieldAlias.name(), is("CATEGORIZE(CONCAT(first_name, \"abc\"))"));
+        assertThat(categorize.field(), is(evalFieldAlias.toAttribute()));
+        assertThat(evalField.source().text(), is("CONCAT(first_name, \"abc\")"));
+        assertThat(categorizeAlias.source(), is(evalFieldAlias.source()));
+    }
+
     /**
      * Expects
      * Limit[1000[INTEGER]]
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java
index 89117b5d4e729..ae31576184938 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java
@@ -28,6 +28,8 @@
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum;
+import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket;
+import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString;
 import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract;
 import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat;
@@ -267,6 +269,17 @@ public void testNullFoldableDoesNotApplyToIsNullAndNotNull() {
         }
     }
 
+    public void testNullBucketGetsFolded() {
+        FoldNull foldNull = new FoldNull();
+        assertEquals(NULL, foldNull.rule(new Bucket(EMPTY, NULL, NULL, NULL, NULL)));
+    }
+
+    public void testNullCategorizeGroupingNotFolded() {
+        FoldNull foldNull = new FoldNull();
+        Categorize categorize = new Categorize(EMPTY, NULL);
+        assertEquals(categorize, foldNull.rule(categorize));
+    }
+
     private void assertNullLiteral(Expression expression) {
         assertEquals(Literal.class, expression.getClass());
         assertNull(expression.fold());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java
index d0088edcb0805..e4257270ce641 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java
@@ -19,6 +19,7 @@
 import org.elasticsearch.search.aggregations.AggregationReduceContext;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategory.TokenAndWeight;
+import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -83,6 +84,8 @@ public void close() {
     @Nullable
     private final CategorizationPartOfSpeechDictionary partOfSpeechDictionary;
 
+    private final List<TokenListCategory> categoriesById;
+
     /**
      * Categories stored in such a way that the most common are accessed first.
     * This is implemented as an {@link ArrayList} with bespoke ordering rather
@@ -108,9 +111,18 @@ public TokenListCategorizer(
         this.lowerThreshold = threshold;
         this.upperThreshold = (1.0f + threshold) / 2.0f;
         this.categoriesByNumMatches = new ArrayList<>();
+        this.categoriesById = new ArrayList<>();
         cacheRamUsage(0);
     }
 
+    public TokenListCategory computeCategory(String s, CategorizationAnalyzer analyzer) {
+        try (TokenStream ts = analyzer.tokenStream("text", s)) {
+            return computeCategory(ts, s.length(), 1);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     public TokenListCategory computeCategory(TokenStream ts, int unfilteredStringLen, long numDocs) throws IOException {
         assert partOfSpeechDictionary != null
             : "This version of computeCategory should only be used when a part-of-speech dictionary is available";
@@ -301,6 +313,7 @@ private synchronized TokenListCategory computeCategory(
             maxUnfilteredStringLen,
             numDocs
         );
+        categoriesById.add(newCategory);
         categoriesByNumMatches.add(newCategory);
         cacheRamUsage(newCategory.ramBytesUsed());
         return repositionCategory(newCategory, newIndex);
@@ -412,6 +425,17 @@ static float similarity(List<TokenAndWeight> left, int leftWeight, List<TokenAndWeight>
 
     public List<SerializableTokenListCategory> toCategories(int size) {
         return categoriesByNumMatches.stream()
             .limit(size)
             .map(category -> new SerializableTokenListCategory(category, bytesRefHash))
             .toList();
     }
 
+    public List<SerializableTokenListCategory> toCategoriesById() {
+        return categoriesById.stream().map(category -> new SerializableTokenListCategory(category, bytesRefHash)).toList();
+    }
+
     public InternalCategorizationAggregation.Bucket[] toOrderedBuckets(int size) {
         return categoriesByNumMatches.stream()
             .limit(size)

From 31ebc5f33fece5e32a4350c13bcd385ee20aabcc Mon Sep 17 00:00:00 2001
From: Brian Seeders
Date: Wed, 27 Nov 2024 13:51:02 -0500
Subject: [PATCH 285/386] Bump versions after 8.15.5 release

---
 .buildkite/pipelines/periodic-packaging.yml                | 6 +++---
 .buildkite/pipelines/periodic.yml                          | 6 +++---
 .ci/bwcVersions                                            | 2 +-
 server/src/main/java/org/elasticsearch/Version.java        | 1 +
 .../main/resources/org/elasticsearch/TransportVersions.csv | 1 +
 .../resources/org/elasticsearch/index/IndexVersions.csv    | 1 +
 6 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index a49e486176484..c1b10a46c62a7 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -273,8 +273,8 @@ steps:
       env:
         BWC_VERSION: 8.14.3
 
-  - label: "{{matrix.image}} / 8.15.4 / packaging-tests-upgrade"
-    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.4
+  - label: "{{matrix.image}} / 8.15.6 / packaging-tests-upgrade"
+    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.6
     timeout_in_minutes: 300
     matrix:
       setup:
@@ -287,7 +287,7 @@ steps:
       machineType: custom-16-32768
       buildDirectory: /dev/shm/bk
     env:
-      BWC_VERSION: 8.15.4
+      BWC_VERSION: 8.15.6
 
   - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade"
    command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index aa1db893df8cc..69d11ef1dabb6 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -287,7 +287,7 @@ steps:
       - signal_reason: agent_stop
         limit: 3
 
-  - label: 8.15.4 / bwc
-    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.4#bwcTest
+  - label: 8.15.6 / bwc
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.6#bwcTest
     timeout_in_minutes: 300
     agents:
       provider: gcp
      image: family/elasticsearch-ubuntu-2004
      machineType: n1-standard-32
      buildDirectory: /dev/shm/bk
      preemptible: true
     env:
-      BWC_VERSION: 8.15.4
+      BWC_VERSION: 8.15.6
     retry:
       automatic:
         - exit_status: "-1"
diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index a8d6dda4fb0c2..826091807ce57 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -14,7 +14,7 @@ BWC_VERSION:
   - "8.12.2"
   - "8.13.4"
   - "8.14.3"
-  - "8.15.4"
+  - "8.15.6"
   - "8.16.2"
   - "8.17.0"
   - "8.18.0"
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 7b65547a7d591..24aa5bd261d7e 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -187,6 +187,7 @@ public class Version implements VersionId, ToXContentFragment {
     public static final Version V_8_15_2 = new Version(8_15_02_99);
     public static final Version V_8_15_3 = new Version(8_15_03_99);
     public static final Version V_8_15_4 = new Version(8_15_04_99);
+    public static final Version V_8_15_6 = new Version(8_15_06_99);
     public static final Version V_8_16_0 = new Version(8_16_00_99);
     public static final Version V_8_16_1 = new Version(8_16_01_99);
     public static final Version V_8_16_2 = new Version(8_16_02_99);
diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
index 6191922f13094..faeb7fe848159 100644
--- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv
+++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv
@@ -132,5 +132,6 @@
 8.15.2,8702003
 8.15.3,8702003
 8.15.4,8702003
+8.15.5,8702003
 8.16.0,8772001
 8.16.1,8772004
diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv
index f84d69af727ac..1fc8bd8648ad6 100644
--- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv
+++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv
@@ -132,5 +132,6 @@
 8.15.2,8512000
 8.15.3,8512000
 8.15.4,8512000
+8.15.5,8512000
 8.16.0,8518000
 8.16.1,8518000

From 807d994c5b956841546c2ce40eb2cd8ddd6a339d Mon Sep 17 00:00:00 2001
From: Brian Seeders
Date: Wed, 27 Nov 2024 13:52:47 -0500
Subject: [PATCH 286/386] Prune changelogs after 8.15.5 release

---
 docs/changelog/114193.yaml | 5 -----
 docs/changelog/114227.yaml | 6 ------
 docs/changelog/114268.yaml | 5 -----
 docs/changelog/114521.yaml | 5 -----
 docs/changelog/114548.yaml | 5 -----
 docs/changelog/116277.yaml | 6 ------
 docs/changelog/116292.yaml | 5 -----
 docs/changelog/116357.yaml | 5 -----
 docs/changelog/116382.yaml | 5 -----
 docs/changelog/116408.yaml | 6 ------
 docs/changelog/116478.yaml | 5 -----
 docs/changelog/116650.yaml | 5 -----
 docs/changelog/116676.yaml | 5 -----
 docs/changelog/116915.yaml | 5 -----
 docs/changelog/116918.yaml | 5 -----
 docs/changelog/116942.yaml | 5 -----
 docs/changelog/116995.yaml | 5 -----
 docs/changelog/117182.yaml | 6 ------
 18 files changed, 94 deletions(-)
 delete mode 100644 docs/changelog/114193.yaml
 delete mode 100644 docs/changelog/114227.yaml
 delete mode 100644 docs/changelog/114268.yaml
 delete mode 100644 docs/changelog/114521.yaml
 delete mode 100644 docs/changelog/114548.yaml
 delete mode 100644 docs/changelog/116277.yaml
 delete mode 100644 docs/changelog/116292.yaml
 delete mode 100644 docs/changelog/116357.yaml
 delete mode 100644 docs/changelog/116382.yaml
 delete mode 100644 docs/changelog/116408.yaml
 delete mode 100644 docs/changelog/116478.yaml
 delete mode 100644 docs/changelog/116650.yaml
 delete mode 100644 docs/changelog/116676.yaml
 delete mode 100644 docs/changelog/116915.yaml
 delete mode 100644 docs/changelog/116918.yaml
 delete mode 100644 docs/changelog/116942.yaml
 delete mode 100644 docs/changelog/116995.yaml
 delete mode 100644 docs/changelog/117182.yaml

diff --git a/docs/changelog/114193.yaml b/docs/changelog/114193.yaml
deleted file mode 100644
index f18f9359007b8..0000000000000
--- a/docs/changelog/114193.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114193
-summary: Add postal_code support to the City and Enterprise databases
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/114227.yaml b/docs/changelog/114227.yaml
deleted file mode 100644
index 9b508f07c9e5a..0000000000000
--- a/docs/changelog/114227.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 114227
-summary: Ignore conflicting fields during dynamic mapping update
-area: Mapping
-type: bug
-issues:
-  - 114228
diff --git a/docs/changelog/114268.yaml b/docs/changelog/114268.yaml
deleted file mode 100644
index 5e4457005d7d3..0000000000000
--- a/docs/changelog/114268.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114268
-summary: Support more maxmind fields in the geoip processor
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/114521.yaml b/docs/changelog/114521.yaml
deleted file mode 100644
index c3a9c7cdd0848..0000000000000
--- a/docs/changelog/114521.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114521
-summary: Add support for registered country fields for maxmind geoip databases
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/114548.yaml b/docs/changelog/114548.yaml
deleted file mode 100644
index b9692bcb2d10c..0000000000000
--- a/docs/changelog/114548.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 114548
-summary: Support IPinfo database configurations
-area: Ingest Node
-type: enhancement
-issues: []
diff --git a/docs/changelog/116277.yaml b/docs/changelog/116277.yaml
deleted file mode 100644
index 62262b7797783..0000000000000
--- a/docs/changelog/116277.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116277
-summary: Update Semantic Query To Handle Zero Size Responses
-area: Vector Search
-type: bug
-issues:
-  - 116083
diff --git a/docs/changelog/116292.yaml b/docs/changelog/116292.yaml
deleted file mode 100644
index f741c67bea155..0000000000000
--- a/docs/changelog/116292.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116292
-summary: Add missing header in `put_data_lifecycle` rest-api-spec
-area: Data streams
-type: bug
-issues: []
diff --git a/docs/changelog/116357.yaml b/docs/changelog/116357.yaml
deleted file mode 100644
index a1a7831eab9ca..0000000000000
--- a/docs/changelog/116357.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116357
-summary: Add tracking for query rule types
-area: Relevance
-type: enhancement
-issues: []
diff --git a/docs/changelog/116382.yaml b/docs/changelog/116382.yaml
deleted file mode 100644
index c941fb6eaa1e4..0000000000000
--- a/docs/changelog/116382.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116382
-summary: Validate missing shards after the coordinator rewrite
-area: Search
-type: bug
-issues: []
diff --git a/docs/changelog/116408.yaml b/docs/changelog/116408.yaml
deleted file mode 100644
index 5f4c8459778a6..0000000000000
--- a/docs/changelog/116408.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 116408
-summary: Propagating nested `inner_hits` to the parent compound retriever
-area: Ranking
-type: bug
-issues:
-  - 116397
diff --git a/docs/changelog/116478.yaml b/docs/changelog/116478.yaml
deleted file mode 100644
index ec50799eb2019..0000000000000
--- a/docs/changelog/116478.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116478
-summary: Semantic text simple partial update
-area: Search
-type: bug
-issues: []
diff --git a/docs/changelog/116650.yaml b/docs/changelog/116650.yaml
deleted file mode 100644
index d314a918aede9..0000000000000
--- a/docs/changelog/116650.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116650
-summary: Fix bug in ML autoscaling when some node info is unavailable
-area: Machine Learning
-type: bug
-issues: []
diff --git a/docs/changelog/116676.yaml b/docs/changelog/116676.yaml
deleted file mode 100644
index 8c6671e177499..0000000000000
--- a/docs/changelog/116676.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116676
-summary: Fix handling of time exceeded exception in fetch phase
-area: Search
-type: bug
-issues: []
diff --git a/docs/changelog/116915.yaml b/docs/changelog/116915.yaml
deleted file mode 100644
index 9686f0023a14a..0000000000000
--- a/docs/changelog/116915.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116915
-summary: Improve message about insecure S3 settings
-area: Snapshot/Restore
-type: enhancement
-issues: []
diff --git a/docs/changelog/116918.yaml b/docs/changelog/116918.yaml
deleted file mode 100644
index 3b04b4ae4a69a..0000000000000
--- a/docs/changelog/116918.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116918
-summary: Split searchable snapshot into multiple repo operations
-area: Snapshot/Restore
-type: enhancement
-issues: []
diff --git a/docs/changelog/116942.yaml b/docs/changelog/116942.yaml
deleted file mode 100644
index 5037e8c59cd85..0000000000000
--- a/docs/changelog/116942.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116942
-summary: Fix handling of bulk requests with semantic text fields and delete ops
-area: Relevance
-type: bug
-issues: []
diff --git a/docs/changelog/116995.yaml b/docs/changelog/116995.yaml
deleted file mode 100644
index a0467c630edf3..0000000000000
--- a/docs/changelog/116995.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 116995
-summary: "Apm-data: disable date_detection for all apm data streams"
-area: Data streams
-type: enhancement
-issues: []
\ No newline at end of file
diff --git a/docs/changelog/117182.yaml b/docs/changelog/117182.yaml
deleted file mode 100644
index b5398bec1ef30..0000000000000
--- a/docs/changelog/117182.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-pr: 117182
-summary: Change synthetic source logic for `constant_keyword`
-area: Mapping
-type: bug
-issues:
-  - 117083

From a46547c8dcf8b58d822b2e30639fe35e4687883b Mon Sep 17 00:00:00 2001
From: Brian Seeders
Date: Wed, 27 Nov 2024 15:26:23 -0500
Subject: [PATCH 287/386] [CI] Pull in the latest mutes from base branch for
 PRs at runtime (#117587)
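
For context (not part of this change): the hook below only downloads
muted-tests.yml from the base branch and records its path in
`~/.gradle/gradle.properties`; the Gradle side then feeds that file into
MutedTestsBuildService. A rough sketch of that consumption, with the service
registration and task wiring assumed rather than taken from the actual
convention plugin (only the property name and `getExcludePatterns()` appear
in this patch):

```java
import org.gradle.api.Project;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.testing.Test;

// Sketch only: apply the parsed exclude patterns to every Test task.
static void applyMutedTests(Project project) {
    Provider<MutedTestsBuildService> mutedTests = project.getGradle()
        .getSharedServices()
        .registerIfAbsent("mutedTests", MutedTestsBuildService.class, spec -> {});
    project.getTasks().withType(Test.class).configureEach(test -> {
        // Each pattern is a class name or "Class.method" glob parsed from muted-tests.yml
        for (String pattern : mutedTests.get().getExcludePatterns()) {
            test.filter(filter -> filter.excludeTestsMatching(pattern));
        }
    });
}
```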

---
 .buildkite/hooks/pre-command                            |  4 ++++
 .buildkite/hooks/pre-command.bat                        |  3 +++
 .buildkite/scripts/get-latest-test-mutes.sh             | 20 +++++++++++++++++++
 .../internal/test/MutedTestsBuildService.java           | 12 ++++++-----
 4 files changed, 34 insertions(+), 5 deletions(-)
 create mode 100755 .buildkite/scripts/get-latest-test-mutes.sh

diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command
index 0ece129a3c238..f25092bc6d42f 100644
--- a/.buildkite/hooks/pre-command
+++ b/.buildkite/hooks/pre-command
@@ -47,6 +47,8 @@ export GRADLE_BUILD_CACHE_PASSWORD
 BUILDKITE_API_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/buildkite-api-token)
 export BUILDKITE_API_TOKEN
 
+export GH_TOKEN="$VAULT_GITHUB_TOKEN"
+
 if [[ "${USE_LUCENE_SNAPSHOT_CREDS:-}" == "true" ]]; then
   data=$(.buildkite/scripts/get-legacy-secret.sh aws-elastic/creds/lucene-snapshots)
 
@@ -117,3 +119,5 @@ if [[ -f /etc/os-release ]] && grep -q '"Amazon Linux 2"' /etc/os-release; then
   echo "$(hostname -i | cut -d' ' -f 2) $(hostname -f)." | sudo tee /etc/dnsmasq.hosts
   sudo systemctl restart dnsmasq.service
 fi
+
+.buildkite/scripts/get-latest-test-mutes.sh
diff --git a/.buildkite/hooks/pre-command.bat b/.buildkite/hooks/pre-command.bat
index fe7c2371de0e5..752c2bf23eb14 100644
--- a/.buildkite/hooks/pre-command.bat
+++ b/.buildkite/hooks/pre-command.bat
@@ -15,9 +15,12 @@ set BUILD_NUMBER=%BUILDKITE_BUILD_NUMBER%
 set COMPOSE_HTTP_TIMEOUT=120
 set JOB_BRANCH=%BUILDKITE_BRANCH%
 
+set GH_TOKEN=%VAULT_GITHUB_TOKEN%
+
 set GRADLE_BUILD_CACHE_USERNAME=vault read -field=username secret/ci/elastic-elasticsearch/migrated/gradle-build-cache
 set GRADLE_BUILD_CACHE_PASSWORD=vault read -field=password secret/ci/elastic-elasticsearch/migrated/gradle-build-cache
 
 bash.exe -c "nohup bash .buildkite/scripts/setup-monitoring.sh </dev/null >/dev/null 2>&1 &"
+bash.exe -c "bash .buildkite/scripts/get-latest-test-mutes.sh"
 
 exit /b 0
diff --git a/.buildkite/scripts/get-latest-test-mutes.sh b/.buildkite/scripts/get-latest-test-mutes.sh
new file mode 100755
index 0000000000000..5721e29f1b773
--- /dev/null
+++ b/.buildkite/scripts/get-latest-test-mutes.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+if [[ ! "${BUILDKITE_PULL_REQUEST:-}" || "${BUILDKITE_AGENT_META_DATA_PROVIDER:-}" == "k8s" ]]; then
+  exit 0
+fi
+
+testMuteBranch="${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-main}"
+testMuteFile="$(mktemp)"
+
+# If this PR contains changes to muted-tests.yml, we disable this functionality
+# Otherwise, we wouldn't be able to test unmutes
+if [[ ! $(gh pr diff "$BUILDKITE_PULL_REQUEST" --name-only | grep 'muted-tests.yml') ]]; then
+  gh api -H 'Accept: application/vnd.github.v3.raw' "repos/elastic/elasticsearch/contents/muted-tests.yml?ref=$testMuteBranch" > "$testMuteFile"
+
+  if [[ -s "$testMuteFile" ]]; then
+    mkdir -p ~/.gradle
+    # This is using gradle.properties instead of an env var so that it's easily compatible with the Windows pre-command hook
+    echo "org.gradle.project.org.elasticsearch.additional.muted.tests=$testMuteFile" >> ~/.gradle/gradle.properties
+  fi
+fi
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java
index 1dfa3bbb29aa2..df3d1c9b70a94 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java
@@ -28,10 +28,12 @@
 import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Set;
 
 public abstract class MutedTestsBuildService implements BuildService {
-    private final List<String> excludePatterns = new ArrayList<>();
+    private final Set<String> excludePatterns = new LinkedHashSet<>();
     private final ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory());
 
     public MutedTestsBuildService() {
@@ -43,23 +45,23 @@ public MutedTestsBuildService() {
         }
     }
 
-    public List<String> getExcludePatterns() {
+    public Set<String> getExcludePatterns() {
         return excludePatterns;
     }
 
-    private List<String> buildExcludePatterns(File file) {
+    private Set<String> buildExcludePatterns(File file) {
         List<MutedTest> mutedTests;
 
         try (InputStream is = new BufferedInputStream(new FileInputStream(file))) {
             mutedTests = objectMapper.readValue(is, MutedTests.class).getTests();
             if (mutedTests == null) {
-                return Collections.emptyList();
+                return Collections.emptySet();
             }
         } catch (IOException e) {
             throw new UncheckedIOException(e);
         }
 
-        List<String> excludes = new ArrayList<>();
+        Set<String> excludes = new LinkedHashSet<>();
         if (mutedTests.isEmpty() == false) {
             for (MutedTestsBuildService.MutedTest mutedTest : mutedTests) {
                 if (mutedTest.getClassName() != null && mutedTest.getMethods().isEmpty() == false) {

From 7a98e31f9db4e7155eecc3563284640ea8b5dbf1 Mon Sep 17 00:00:00 2001
From: Brendan Cully
Date: Wed, 27 Nov 2024 12:30:02 -0800
Subject: [PATCH 288/386] Make VerifyingIndexInput public (#117518)

This way we can verify store files as we read them directly, without
going through a store abstraction we may not have if we copy lucene
files around.
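
For illustration (not part of this change): with the constructor public, a
caller outside `Store` can stream a file through the checksumming wrapper and
verify it at the end. The directory handling and helper name below are
assumptions; only the now-public class comes from this patch:

```java
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.index.store.Store;

// Sketch: read an entire Lucene file through VerifyingIndexInput, hashing as
// we go, then compare the accumulated checksum with the footer value.
static void verifyWhileReading(Directory directory, String fileName) throws IOException {
    try (IndexInput raw = directory.openInput(fileName, IOContext.READONCE)) {
        Store.VerifyingIndexInput in = new Store.VerifyingIndexInput(raw);
        byte[] buffer = new byte[8192];
        long remaining = in.length();
        while (remaining > 0) {
            int chunk = (int) Math.min(buffer.length, remaining);
            in.readBytes(buffer, 0, chunk); // consume (and checksum) the bytes
            remaining -= chunk;
        }
        Store.verify(in); // throws a corruption exception on checksum mismatch
    }
}
```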

---
 server/src/main/java/org/elasticsearch/index/store/Store.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java
index 887fe486b6003..e6b499c07f189 100644
--- a/server/src/main/java/org/elasticsearch/index/store/Store.java
+++ b/server/src/main/java/org/elasticsearch/index/store/Store.java
@@ -1217,14 +1217,14 @@ public static String digestToString(long digest) {
      * mechanism that is used in some repository plugins (S3 for example). However, the checksum is only calculated on
      * the first read. All consecutive reads of the same data are not used to calculate the checksum.
      */
-    static class VerifyingIndexInput extends ChecksumIndexInput {
+    public static class VerifyingIndexInput extends ChecksumIndexInput {
         private final IndexInput input;
         private final Checksum digest;
         private final long checksumPosition;
         private final byte[] checksum = new byte[8];
         private long verifiedPosition = 0;
 
-        VerifyingIndexInput(IndexInput input) {
+        public VerifyingIndexInput(IndexInput input) {
             this(input, new BufferedChecksum(new CRC32()));
         }
 

From e33e1a03da31c88e4fa7bbaa074fa33ecd4c68ab Mon Sep 17 00:00:00 2001
From: Michael Peterson
Date: Wed, 27 Nov 2024 16:14:57 -0500
Subject: [PATCH 289/386] ESQL: async search responses have CCS metadata while
 searches are running (#117265)

ES|QL async search responses now include CCS metadata while the query is
still running. The CCS metadata will be present only if a remote cluster
is queried and the user requested it with the `include_ccs_metadata: true`
setting on the original request to `POST /_query/async`. The setting
cannot be modified in the query to `GET /_query/async/:id`.

The core change is that the EsqlExecutionInfo object is set on the
EsqlQueryTask, which is used for async ES|QL queries, so that calls to
`GET /_query/async/:id` have access to the same EsqlExecutionInfo object
that is being updated as the planning and query progress.

Secondly, the overall `took` time is now always present on ES|QL
responses, even for async-searches while the query is still running.
The took time shows a "took-so-far" value and will change upon refresh
until the query has finished. This is present regardless of the
`include_ccs_metadata` setting.

Example response showing in progress state of the query:

```
GET _query/async/FlhaeTBxUU0yU2xhVzM2TlRLY3F1eXcceWlSWWZlRDhUVTJEUGFfZUROaDdtUTo0MDQwNA
```

```json
{
  "id": "FlhaeTBxUU0yU2xhVzM2TlRLY3F1eXcceWlSWWZlRDhUVTJEUGFfZUROaDdtUTo0MDQwNA==",
  "is_running": true,
  "took": 2032,
  "columns": [],
  "values": [],
  "_clusters": {
    "total": 3,
    "successful": 1,
    "running": 2,
    "skipped": 0,
    "partial": 0,
    "failed": 0,
    "details": {
      "(local)": {
        "status": "running",
        "indices": "web_traffic",
        "_shards": {
          "total": 2,
          "skipped": 0
        }
      },
      "remote1": {
        "status": "running",
        "indices": "web_traffic"
      },
      "remote2": {
        "status": "successful",
        "indices": "web_traffic",
        "took": 180,
        "_shards": {
          "total": 2,
          "successful": 2,
          "skipped": 0,
          "failed": 0
        }
      }
    }
  }
}
```

---
 docs/changelog/117265.yaml                    |   5 +
 .../esql/action/CrossClusterAsyncQueryIT.java | 522 ++++++++++++++++++
 .../esql/action/CrossClustersQueryIT.java     |   9 +-
 .../xpack/esql/action/EsqlExecutionInfo.java  |  13 +-
 .../xpack/esql/action/EsqlQueryResponse.java  |   7 +-
 .../xpack/esql/action/EsqlQueryTask.java      |  13 +-
 .../xpack/esql/plugin/ComputeListener.java    |  29 +-
 .../xpack/esql/plugin/ComputeService.java     |  26 +-
 .../esql/plugin/TransportEsqlQueryAction.java |  23 +-
 .../xpack/esql/session/EsqlSession.java       |   1 +
 .../esql/action/EsqlQueryResponseTests.java   |   3 +-
 .../esql/plugin/ComputeListenerTests.java     |  16 +-
 12 files changed, 634 insertions(+), 33 deletions(-)
 create mode 100644 docs/changelog/117265.yaml
 create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java

diff --git a/docs/changelog/117265.yaml b/docs/changelog/117265.yaml
new file mode 100644
index 0000000000000..ec6605155538d
--- /dev/null
+++ b/docs/changelog/117265.yaml
@@ -0,0 +1,5 @@
+pr: 117265
+summary: Async search responses have CCS metadata while searches are running
+area: ES|QL
+type: enhancement
+issues: []
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
new file mode 100644
index 0000000000000..440582dcfbb45
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
@@ -0,0 +1,522 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.compute.operator.exchange.ExchangeService;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.index.mapper.OnScriptError;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.ScriptPlugin;
+import org.elasticsearch.script.LongFieldScript;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptEngine;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.test.AbstractMultiClustersTestCase;
+import org.elasticsearch.test.XContentTestUtils;
+import org.elasticsearch.transport.RemoteClusterAware;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
+import org.elasticsearch.xpack.esql.plugin.EsqlPlugin;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.elasticsearch.core.TimeValue.timeValueMillis;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.hamcrest.Matchers.not;
+
+public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
+
+    private static final String REMOTE_CLUSTER_1 = "cluster-a";
+    private static final String REMOTE_CLUSTER_2 = "remote-b";
+    private static String LOCAL_INDEX = "logs-1";
+    private static String REMOTE_INDEX = "logs-2";
+    private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
+
+    @Override
+    protected Collection<String> remoteClusterAlias() {
+        return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2);
+    }
+
+    @Override
+    protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
+        return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean());
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
+        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
+        plugins.add(EsqlPlugin.class);
+        plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
+        plugins.add(InternalExchangePlugin.class);
+        plugins.add(PauseFieldPlugin.class);
+        return plugins;
+    }
+
+    public static class InternalExchangePlugin extends Plugin {
+        @Override
+        public List<Setting<?>> getSettings() {
+            return List.of(
+                Setting.timeSetting(
+                    ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING,
+                    TimeValue.timeValueSeconds(30),
+                    Setting.Property.NodeScope
+                )
+            );
+        }
+    }
+
+    @Before
+    public void resetPlugin() {
+        PauseFieldPlugin.allowEmitting = new CountDownLatch(1);
+        PauseFieldPlugin.startEmitting = new CountDownLatch(1);
+    }
+
+    public static class PauseFieldPlugin extends Plugin implements ScriptPlugin {
+        public static CountDownLatch startEmitting = new CountDownLatch(1);
+        public static CountDownLatch allowEmitting = new CountDownLatch(1);
+
+        @Override
+        public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
+            return new ScriptEngine() {
+                @Override
+
+                public String getType() {
+                    return "pause";
+                }
+
+                @Override
+                @SuppressWarnings("unchecked")
+                public <FactoryType> FactoryType compile(
+                    String name,
+                    String code,
+                    ScriptContext<FactoryType> context,
+                    Map<String, String> params
+                ) {
+                    if (context == LongFieldScript.CONTEXT) {
+                        return (FactoryType) new LongFieldScript.Factory() {
+                            @Override
+                            public LongFieldScript.LeafFactory newFactory(
+                                String fieldName,
+                                Map<String, Object> params,
+                                SearchLookup searchLookup,
+                                OnScriptError onScriptError
+                            ) {
+                                return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) {
+                                    @Override
+                                    public void execute() {
+                                        startEmitting.countDown();
+                                        try {
+                                            assertTrue(allowEmitting.await(30, TimeUnit.SECONDS));
+                                        } catch (InterruptedException e) {
+                                            throw new AssertionError(e);
+                                        }
+                                        emit(1);
+                                    }
+                                };
+                            }
+                        };
+                    }
+                    throw new IllegalStateException("unsupported type " + context);
+                }
+
+                @Override
+                public Set<ScriptContext<?>> getSupportedContexts() {
+                    return Set.of(LongFieldScript.CONTEXT);
+                }
+            };
+        }
+    }
+
+    /**
+     * Includes testing for CCS metadata in the GET /_query/async/:id response while the search is still running
+     */
+    public void testSuccessfulPathways() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(3);
+        int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
+        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
+        int remote2NumShards = (Integer) testClusterInfo.get("remote2.blocking_index.num_shards");
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        Boolean requestIncludeMeta = includeCCSMetadata.v1();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        AtomicReference<String> asyncExecutionId = new AtomicReference<>();
+
+        String q = "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(const) | LIMIT 10";
+        try (EsqlQueryResponse resp = runAsyncQuery(q, requestIncludeMeta, null, TimeValue.timeValueMillis(100))) {
+            assertTrue(resp.isRunning());
+            assertNotNull("async execution id is null", resp.asyncExecutionId());
+            asyncExecutionId.set(resp.asyncExecutionId().get());
+            // executionInfo may or may not be set on the initial response when there is a relatively low wait_for_completion_timeout
+            // so we do not check for it here
+        }
+
+        // wait until we know that the query against 'remote-b:blocking' has started
+        PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster("cluster-a");
+                assertThat(clusterA.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
+            }
+        });
+
+        /* at this point:
+         *  the query against cluster-a should be finished
+         *  the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
+         *  the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
+         */
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertThat(asyncResponse.isRunning(), is(true));
+            assertThat(
+                executionInfo.clusterAliases(),
+                equalTo(Set.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))
+            );
+            assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING), equalTo(2));
+            assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL), equalTo(1));
+
+            EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+            assertThat(clusterA.getTotalShards(), greaterThanOrEqualTo(1));
+            assertThat(clusterA.getSuccessfulShards(), equalTo(clusterA.getTotalShards()));
+            assertThat(clusterA.getSkippedShards(), equalTo(0));
+            assertThat(clusterA.getFailedShards(), equalTo(0));
+            assertThat(clusterA.getFailures().size(), equalTo(0));
+            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
+
+            EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
+            // should still be RUNNING since the local cluster has to do a STATS on the coordinator, waiting on remoteB
+            assertThat(local.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING));
+            assertThat(clusterA.getTotalShards(), greaterThanOrEqualTo(1));
+
+            EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            // should still be RUNNING since we haven't released the countdown lock to proceed
+            assertThat(remoteB.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING));
+            assertNull(remoteB.getSuccessfulShards());  // should not be filled in until query is finished
+
+            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
+        }
+
+        // allow remoteB query to proceed
+        PauseFieldPlugin.allowEmitting.countDown();
+
+        // wait until both remoteB and local queries have finished
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
+                assertThat(remoteB.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
+                EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
+                assertThat(local.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
+                assertThat(asyncResponse.isRunning(), is(false));
+            }
+        });
+
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(1L));
+
+            EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
+            assertThat(clusterA.getTotalShards(), equalTo(remote1NumShards));
+            assertThat(clusterA.getSuccessfulShards(), equalTo(remote1NumShards));
+            assertThat(clusterA.getSkippedShards(), equalTo(0));
+            assertThat(clusterA.getFailedShards(), equalTo(0));
+            assertThat(clusterA.getFailures().size(), equalTo(0));
+
+            EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            assertThat(remoteB.getTook().millis(), greaterThanOrEqualTo(0L));
+            assertThat(remoteB.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+            assertThat(remoteB.getTotalShards(), equalTo(remote2NumShards));
+            assertThat(remoteB.getSuccessfulShards(), equalTo(remote2NumShards));
+            assertThat(remoteB.getSkippedShards(), equalTo(0));
+            assertThat(remoteB.getFailedShards(), equalTo(0));
+            assertThat(remoteB.getFailures().size(), equalTo(0));
+
+            EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
+            assertThat(local.getTook().millis(), greaterThanOrEqualTo(0L));
+            assertThat(local.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+            assertThat(local.getTotalShards(), equalTo(localNumShards));
+            assertThat(local.getSuccessfulShards(), equalTo(localNumShards));
+            assertThat(local.getSkippedShards(), equalTo(0));
+            assertThat(local.getFailedShards(), equalTo(0));
+            assertThat(local.getFailures().size(), equalTo(0));
+        } finally {
+            AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId.get());
+            assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+        }
+    }
+
+    public void testAsyncQueriesWithLimit0() throws IOException {
+        setupClusters(3);
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        Boolean requestIncludeMeta = includeCCSMetadata.v1();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final TimeValue waitForCompletion = TimeValue.timeValueNanos(randomFrom(1L, Long.MAX_VALUE));
+        String asyncExecutionId = null;
+        try (EsqlQueryResponse resp = runAsyncQuery("FROM logs*,*:logs* | LIMIT 0", requestIncludeMeta, null, waitForCompletion)) {
+            EsqlExecutionInfo executionInfo = resp.getExecutionInfo();
+            if (resp.isRunning()) {
+                asyncExecutionId = resp.asyncExecutionId().get();
+                assertThat(resp.columns().size(), equalTo(0));
+                assertThat(resp.values().hasNext(), is(false)); // values should be empty list
+
+            } else {
+                assertThat(resp.columns().size(), equalTo(4));
+                assertThat(resp.columns().contains(new ColumnInfoImpl("const", "long")), is(true));
+                assertThat(resp.columns().contains(new ColumnInfoImpl("id", "keyword")), is(true));
+                assertThat(resp.columns().contains(new ColumnInfoImpl("tag", "keyword")), is(true));
+                assertThat(resp.columns().contains(new ColumnInfoImpl("v", "long")), is(true));
+                assertThat(resp.values().hasNext(), is(false)); // values should be empty list
+
+                assertNotNull(executionInfo);
+                assertThat(executionInfo.isCrossClusterSearch(), is(true));
+                long overallTookMillis = executionInfo.overallTook().millis();
+                assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
+                assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta));
+                assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+
+                EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+                assertThat(remoteCluster.getIndexExpression(), equalTo("logs*"));
+                assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+                assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L));
+                assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
+                assertThat(remoteCluster.getTotalShards(), equalTo(0));
+                assertThat(remoteCluster.getSuccessfulShards(), equalTo(0));
+                assertThat(remoteCluster.getSkippedShards(), equalTo(0));
+                assertThat(remoteCluster.getFailedShards(), equalTo(0));
+
+                EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+                assertThat(remote2Cluster.getIndexExpression(), equalTo("logs*"));
+                assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+                assertThat(remote2Cluster.getTook().millis(), greaterThanOrEqualTo(0L));
+                assertThat(remote2Cluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
+                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
+                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
+                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
+                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
+
+                EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+                assertThat(localCluster.getIndexExpression(), equalTo("logs*"));
+                assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+                assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L));
+                assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
+                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
+                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
+                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
+                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
+
+                assertClusterMetadataInResponse(resp, responseExpectMeta, 3);
+            }
+        } finally {
+            if (asyncExecutionId != null) {
+                AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId);
+                assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+            }
+        }
+    }
+
+    protected EsqlQueryResponse runAsyncQuery(String query, Boolean ccsMetadata, QueryBuilder filter, TimeValue waitCompletionTime) {
+        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
+        request.query(query);
+        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
+        request.profile(randomInt(5) == 2);
+        request.columnar(randomBoolean());
+        if (ccsMetadata != null) {
+            request.includeCCSMetadata(ccsMetadata);
+        }
+        request.waitForCompletionTimeout(waitCompletionTime);
+        request.keepOnCompletion(false);
+        if (filter != null) {
+            request.filter(filter);
+        }
+        return runAsyncQuery(request);
+    }
+
+    protected EsqlQueryResponse runAsyncQuery(EsqlQueryRequest request) {
+        try {
+            return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for query response", e);
+        }
+    }
+
+    AcknowledgedResponse deleteAsyncId(String id) {
+        try {
+            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
+            return client().execute(TransportDeleteAsyncResultAction.TYPE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for DELETE response", e);
+        }
+    }
+
+    EsqlQueryResponse getAsyncResponse(String id) {
+        try {
+            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueMillis(1));
+            return client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for GET async result", e);
+        }
+    }
+
+    private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta, int numClusters) {
+        try {
+            final Map<String, Object> esqlResponseAsMap = XContentTestUtils.convertToMap(resp);
+            final Object clusters = esqlResponseAsMap.get("_clusters");
+            if (responseExpectMeta) {
+                assertNotNull(clusters);
+                // test a few entries to ensure it looks correct (other tests do a full analysis of the metadata in the response)
+                @SuppressWarnings("unchecked")
+                Map<String, Object> inner = (Map<String, Object>) clusters;
+                assertTrue(inner.containsKey("total"));
+                assertThat((int) inner.get("total"), equalTo(numClusters));
+                assertTrue(inner.containsKey("details"));
+            } else {
+                assertNull(clusters);
+            }
+        } catch (IOException e) {
+            fail("Could not convert ESQLQueryResponse to Map: " + e);
+        }
+    }
+
+    /**
+     * v1: value to send to runQuery (can be null; null means use default value)
+     * v2: whether to expect CCS Metadata in the response (cannot be null)
+     * @return a {@link Tuple} of the request value and the expected response flag
+     */
+    public static Tuple<Boolean, Boolean> randomIncludeCCSMetadata() {
+        return switch (randomIntBetween(1, 3)) {
+            case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE);
+            case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE);
+            case 3 -> new Tuple<>(null, Boolean.FALSE);
+            default -> throw new AssertionError("should not get here");
+        };
+    }
+
+    Map<String, Object> setupClusters(int numClusters) throws IOException {
+        assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters;
+        int numShardsLocal = randomIntBetween(1, 5);
+        populateLocalIndices(LOCAL_INDEX, numShardsLocal);
+
+        int numShardsRemote = randomIntBetween(1, 5);
+        populateRemoteIndices(REMOTE_CLUSTER_1, REMOTE_INDEX, numShardsRemote);
+
+        Map<String, Object> clusterInfo = new HashMap<>();
+        clusterInfo.put("local.num_shards", numShardsLocal);
+        clusterInfo.put("local.index", LOCAL_INDEX);
+        clusterInfo.put("remote1.num_shards", numShardsRemote);
+        clusterInfo.put("remote1.index", REMOTE_INDEX);
+
+        if (numClusters == 3) {
+            int numShardsRemote2 = randomIntBetween(1, 5);
+            populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2);
+            populateRemoteIndicesWithRuntimeMapping(REMOTE_CLUSTER_2);
+            clusterInfo.put("remote2.index", REMOTE_INDEX);
+            clusterInfo.put("remote2.num_shards", numShardsRemote2);
+            clusterInfo.put("remote2.blocking_index", INDEX_WITH_RUNTIME_MAPPING);
+            clusterInfo.put("remote2.blocking_index.num_shards", 1);
+        }
+
+        String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1);
+        Setting<?> skipUnavailableSetting = cluster(REMOTE_CLUSTER_1).clusterService().getClusterSettings().get(skipUnavailableKey);
+        boolean skipUnavailable = (boolean) cluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY).clusterService()
+            .getClusterSettings()
+            .get(skipUnavailableSetting);
+        clusterInfo.put("remote.skip_unavailable", skipUnavailable);
+
+        return clusterInfo;
+    }
+
+    void populateLocalIndices(String indexName, int numShards) {
+        Client localClient = client(LOCAL_CLUSTER);
+        assertAcked(
+            localClient.admin()
+                .indices()
+                .prepareCreate(indexName)
+                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
+                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long", "const", "type=long")
+        );
+        for (int i = 0; i < 10; i++) {
+            localClient.prepareIndex(indexName).setSource("id", "local-" + i, "tag", "local", "v", i).get();
+        }
+        localClient.admin().indices().prepareRefresh(indexName).get();
+    }
+
+    void populateRemoteIndicesWithRuntimeMapping(String clusterAlias) throws IOException {
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        client(clusterAlias).admin().indices().prepareCreate(INDEX_WITH_RUNTIME_MAPPING).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(INDEX_WITH_RUNTIME_MAPPING)
+            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < 10; i++) {
+            bulk.add(new IndexRequest().source("foo", i));
+        }
+        bulk.get();
+    }
+
+    void populateRemoteIndices(String clusterAlias, String indexName, int numShards) throws IOException {
+        Client remoteClient = client(clusterAlias);
+        assertAcked(
+            remoteClient.admin()
+                .indices()
+                .prepareCreate(indexName)
+                .setSettings(Settings.builder().put("index.number_of_shards", numShards))
+                .setMapping("id", "type=keyword", "tag", "type=keyword", "v", "type=long")
+        );
+        for (int i = 0; i < 10; i++) {
+            remoteClient.prepareIndex(indexName).setSource("id", "remote-" + i, "tag", "remote", "v", i * i).get();
+        }
+        remoteClient.admin().indices().prepareRefresh(indexName).get();
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java
index 6801e1f4eb404..596c70e57ccd6 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java
@@ -61,6 +61,10 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase {
 
     private static final String REMOTE_CLUSTER_1 = "cluster-a";
     private static final String REMOTE_CLUSTER_2 = "remote-b";
+    private static String LOCAL_INDEX = "logs-1";
+    private static String IDX_ALIAS = "alias1";
+    private static String FILTERED_IDX_ALIAS = "alias-filtered-1";
+    private static String REMOTE_INDEX = "logs-2";
 
     @Override
     protected Collection<String> remoteClusterAlias() {
@@ -1278,11 +1282,6 @@ Map<String, Object> setupTwoClusters() {
         return setupClusters(2);
     }
 
-    private static String LOCAL_INDEX = "logs-1";
-    private static String IDX_ALIAS = "alias1";
-    private static String FILTERED_IDX_ALIAS = "alias-filtered-1";
-    private static String REMOTE_INDEX = "logs-2";
-
     Map<String, Object> setupClusters(int numClusters) {
         assert numClusters == 2 || numClusters == 3 : "2 or 3 clusters supported not: " + numClusters;
         int numShardsLocal = randomIntBetween(1, 5);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
index 80bb2afe57122..ba7a7e8266845 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java
@@ -169,6 +169,17 @@ public TimeValue overallTook() {
         return overallTook;
     }
 
+    /**
+     * How much time the query took since starting.
+     */
+    public TimeValue tookSoFar() {
+        if (relativeStartNanos == null) {
+            return new TimeValue(0);
+        } else {
+            return new TimeValue(System.nanoTime() - relativeStartNanos, TimeUnit.NANOSECONDS);
+        }
+    }
+
     public Set<String> clusterAliases() {
         return clusterInfo.keySet();
     }
@@ -478,7 +489,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         {
             builder.field(STATUS_FIELD.getPreferredName(), getStatus().toString());
             builder.field(INDICES_FIELD.getPreferredName(), indexExpression);
-            if (took != null) {
+            if (took != null && status != Status.RUNNING) {
                 builder.field(TOOK.getPreferredName(), took.millis());
             }
             if (totalShards != null) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
index 4e59d5419fe6f..77aed298baea5 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java
@@ -196,8 +196,11 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params
             }
             b.field("is_running", isRunning);
         }
-        if (executionInfo != null && executionInfo.overallTook() != null) {
-            b.field("took", executionInfo.overallTook().millis());
+        if (executionInfo != null) {
+            long tookInMillis = executionInfo.overallTook() == null
+                ? executionInfo.tookSoFar().millis()
+                : executionInfo.overallTook().millis();
+            b.field("took", tookInMillis);
         }
         if (dropNullColumns) {
             b.append(ResponseXContentUtils.allColumns(columns, "all_columns"))
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java
index b12cf4eb354bf..f896a25317102 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java
@@ -17,6 +17,8 @@
 
 public class EsqlQueryTask extends StoredAsyncTask<EsqlQueryResponse> {
 
+    private EsqlExecutionInfo executionInfo;
+
     public EsqlQueryTask(
         long id,
         String type,
@@ -29,10 +31,19 @@ public EsqlQueryTask(
         TimeValue keepAlive
     ) {
         super(id, type, action, description, parentTaskId, headers, originHeaders, asyncExecutionId, keepAlive);
+        this.executionInfo = null;
+    }
+
+    public void setExecutionInfo(EsqlExecutionInfo executionInfo) {
+        this.executionInfo = executionInfo;
+    }
+
+    public EsqlExecutionInfo executionInfo() {
+        return executionInfo;
     }
 
     @Override
     public EsqlQueryResponse getCurrentResult() {
-        return new EsqlQueryResponse(List.of(), List.of(), null, false, getExecutionId().getEncoded(), true, true, null);
+        return new EsqlQueryResponse(List.of(), List.of(), null, false, getExecutionId().getEncoded(), true, true, executionInfo);
     }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java
index 49af4a593e6e5..8d041ffbdf0e4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java
@@ -112,6 +112,7 @@ private ComputeListener(
             if (runningOnRemoteCluster()) {
                 // for remote executions - this ComputeResponse is created on the remote cluster/node and will be serialized and
                 // received by the acquireCompute method callback on the coordinating cluster
+                setFinalStatusAndShardCounts(clusterAlias, executionInfo);
                 EsqlExecutionInfo.Cluster cluster = esqlExecutionInfo.getCluster(clusterAlias);
                 result = new ComputeResponse(
                     collectedProfiles.isEmpty() ? List.of() : collectedProfiles.stream().toList(),
@@ -126,19 +127,33 @@ private ComputeListener(
                 if (coordinatingClusterIsSearchedInCCS()) {
                     // if not already marked as SKIPPED, mark the local cluster as finished once the coordinator and all
                     // data nodes have finished processing
-                    executionInfo.swapCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, (k, v) -> {
-                        if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) {
-                            return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL).build();
-                        } else {
-                            return v;
-                        }
-                    });
+                    setFinalStatusAndShardCounts(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, executionInfo);
                 }
             }
             delegate.onResponse(result);
         }, e -> delegate.onFailure(failureCollector.getFailure())));
     }
 
+    private static void setFinalStatusAndShardCounts(String clusterAlias, EsqlExecutionInfo executionInfo) {
+        executionInfo.swapCluster(clusterAlias, (k, v) -> {
+            // TODO: once PARTIAL status is supported (partial results work to come), modify this code as needed
+            if (v.getStatus() != EsqlExecutionInfo.Cluster.Status.SKIPPED) {
+                assert v.getTotalShards() != null && v.getSkippedShards() != null : "Null total or skipped shard count: " + v;
+                return new EsqlExecutionInfo.Cluster.Builder(v).setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)
+                    /*
+                     * Total and skipped shard counts are set early in execution (after can-match).
+                     * Until ES|QL supports shard-level partial results, we just set all non-skipped shards
+                     * as successful and none are failed.
+                     */
+                    .setSuccessfulShards(v.getTotalShards())
+                    .setFailedShards(0)
+                    .build();
+            } else {
+                return v;
+            }
+        });
+    }
+
     /**
      * @return true if the "local" querying/coordinator cluster is being searched in a cross-cluster search
     */
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index 6a0d1bf9bb035..73266551f169c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -178,6 +178,7 @@ public void execute(
                 null
             );
             String local = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY;
+            updateShardCountForCoordinatorOnlyQuery(execInfo);
             try (var computeListener = ComputeListener.create(local, transportService, rootTask, execInfo, listener.map(r -> {
                 updateExecutionInfoAfterCoordinatorOnlyQuery(execInfo);
                 return new Result(physicalPlan.output(), collectedPages, r.getProfiles(), execInfo);
@@ -260,6 +261,22 @@ public void execute(
         }
     }
 
+    // For queries like: FROM logs* | LIMIT 0 (including cross-cluster LIMIT 0 queries)
+    private static void updateShardCountForCoordinatorOnlyQuery(EsqlExecutionInfo execInfo) {
+        if (execInfo.isCrossClusterSearch()) {
+            for (String clusterAlias : execInfo.clusterAliases()) {
+                execInfo.swapCluster(
+                    clusterAlias,
+                    (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(0)
+                        .setSuccessfulShards(0)
+                        .setSkippedShards(0)
+                        .setFailedShards(0)
+                        .build()
+                );
+            }
+        }
+    }
+
     // For queries like: FROM logs* | LIMIT 0 (including cross-cluster LIMIT 0 queries)
     private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionInfo execInfo) {
         execInfo.markEndQuery();  // TODO: revisit this time recording model as part of INLINESTATS improvements
@@ -267,11 +284,7 @@ private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionIn
         assert
execInfo.planningTookTime() != null : "Planning took time should be set on EsqlExecutionInfo but is null"; for (String clusterAlias : execInfo.clusterAliases()) { execInfo.swapCluster(clusterAlias, (k, v) -> { - var builder = new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.overallTook()) - .setTotalShards(0) - .setSuccessfulShards(0) - .setSkippedShards(0) - .setFailedShards(0); + var builder = new EsqlExecutionInfo.Cluster.Builder(v).setTook(execInfo.overallTook()); if (v.getStatus() == EsqlExecutionInfo.Cluster.Status.RUNNING) { builder.setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL); } @@ -324,9 +337,8 @@ private void startComputeOnDataNodes( executionInfo.swapCluster( clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(dataNodeResult.totalShards()) - .setSuccessfulShards(dataNodeResult.totalShards()) + // do not set successful or failed shard count here - do it when search is done .setSkippedShards(dataNodeResult.skippedShards()) - .setFailedShards(0) .build() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index fdc6e06a11032..76bfb95d07926 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -151,6 +151,8 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener @Override public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener listener) { + // set EsqlExecutionInfo on async-search task so that it is accessible to GET _query/async while the query is still running + task.setExecutionInfo(createEsqlExecutionInfo(request)); ActionListener.run(listener, l -> innerExecute(task, request, l)); } @@ -170,10 +172,9 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener remoteClusterService.isSkipUnavailable(clusterAlias), - request.includeCCSMetadata() - ); + // async-query uses EsqlQueryTask, so pull the EsqlExecutionInfo out of the task + // sync query uses CancellableTask which does not have EsqlExecutionInfo, so create one + EsqlExecutionInfo executionInfo = getOrCreateExecutionInfo(task, request); PlanRunner planRunner = (plan, resultListener) -> computeService.execute( sessionId, (CancellableTask) task, @@ -194,6 +195,18 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener remoteClusterService.isSkipUnavailable(clusterAlias), request.includeCCSMetadata()); + } + private EsqlQueryResponse toResponse(Task task, EsqlQueryRequest request, Configuration configuration, Result result) { List columns = result.schema().stream().map(c -> new ColumnInfoImpl(c.name(), c.dataType().outputType())).toList(); EsqlQueryResponse.Profile profile = configuration.profile() ? 
new EsqlQueryResponse.Profile(result.profiles()) : null; @@ -269,7 +282,7 @@ public EsqlQueryResponse initialResponse(EsqlQueryTask task) { asyncExecutionId, true, // is_running true, // isAsync - null + task.executionInfo() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 8f65914d1c30d..021596c31f65d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -147,6 +147,7 @@ public String sessionId() { * Execute an ESQL request. */ public void execute(EsqlQueryRequest request, EsqlExecutionInfo executionInfo, PlanRunner planRunner, ActionListener listener) { + assert executionInfo != null : "Null EsqlExecutionInfo"; LOGGER.debug("ESQL query:\n{}", request.query()); analyzedPlan( parse(request.query(), request.params()), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 35364089127cc..f7b402b909732 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -519,14 +519,15 @@ static EsqlQueryResponse fromXContent(XContentParser parser) { } public void testChunkResponseSizeColumnar() { - int sizeClusterDetails = 14; try (EsqlQueryResponse resp = randomResponse(true, null)) { + int sizeClusterDetails = 14; int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize); } try (EsqlQueryResponse resp = randomResponseAsync(true, null, true)) { + int sizeClusterDetails = resp.isRunning() ? 
13 : 14; // overall took time not present when is_running=true int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 7 + sizeClusterDetails + bodySize); // is_running diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java index 625cb5628d039..b606f99df437c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -353,10 +353,7 @@ public void testAcquireComputeRunningOnRemoteClusterFillsInTookTime() { assertThat(response.getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(executionInfo.getCluster(remoteAlias).getTook().millis(), greaterThanOrEqualTo(0L)); assertThat(executionInfo.getCluster(remoteAlias).getTook(), equalTo(response.getTook())); - - // the status in the (remote) executionInfo will still be RUNNING, since the SUCCESSFUL status gets set on the querying - // cluster executionInfo in the acquireCompute CCS listener, NOT present in this test - see testCollectComputeResultsInCCSListener - assertThat(executionInfo.getCluster(remoteAlias).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)); + assertThat(executionInfo.getCluster(remoteAlias).getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL)); Mockito.verifyNoInteractions(transportService.getTaskManager()); } @@ -376,6 +373,17 @@ public void testAcquireComputeRunningOnQueryingClusterFillsInTookTime() { // fully filled in for cross-cluster searches executionInfo.swapCluster(localCluster, (k, v) -> new EsqlExecutionInfo.Cluster(localCluster, "logs*", false)); executionInfo.swapCluster("my_remote", (k, v) -> new EsqlExecutionInfo.Cluster("my_remote", "my_remote:logs*", false)); + + // before acquire-compute, can-match (SearchShards) runs filling in total shards and skipped shards, so simulate that here + executionInfo.swapCluster( + localCluster, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(10).setSkippedShards(1).build() + ); + executionInfo.swapCluster( + "my_remote", + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(10).setSkippedShards(1).build() + ); + try ( ComputeListener computeListener = ComputeListener.create( // whereRunning=localCluster simulates running on the querying cluster From c2e4afcfd584fe35aa88a9b9840cf5ff4c3c80b6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 27 Nov 2024 13:23:20 -0800 Subject: [PATCH 290/386] Try to finish remote sink once (#117592) Currently, we have three clients fetching pages by default, each with its own lifecycle. This can result in scenarios where more than one request is sent to complete the remote sink. While this does not cause correctness issues, it is inefficient, especially for cross-cluster requests. This change tracks the status of the remote sink and tries to send only one finish request per remote sink. 
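For reference, the guard in isolation looks roughly like this: a standalone
sketch of the pattern, not the production code, where the Runnable stands in
for the real transport call that completes the sink.

    import java.util.concurrent.atomic.AtomicBoolean;

    /** Minimal sketch: run an expensive "finish" action at most once. */
    class FinishOnce {
        private final AtomicBoolean finished = new AtomicBoolean(false);
        private final Runnable sendFinishRequest; // stand-in for the transport call

        FinishOnce(Runnable sendFinishRequest) {
            this.sendFinishRequest = sendFinishRequest;
        }

        void finish() {
            // compareAndSet admits exactly one caller; concurrent fetch clients
            // racing to finish the same sink therefore send a single request.
            if (finished.compareAndSet(false, true)) {
                sendFinishRequest.run();
            }
        }
    }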
--- .../operator/exchange/ExchangeService.java | 28 +++++++++++++++++++ .../exchange/ExchangeServiceTests.java | 9 ++++++ 2 files changed, 37 insertions(+) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index d633270b5c595..a943a90d02e87 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -42,6 +42,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; /** @@ -292,6 +293,7 @@ static final class TransportRemoteSink implements RemoteSink { final Executor responseExecutor; final AtomicLong estimatedPageSizeInBytes = new AtomicLong(0L); + final AtomicBoolean finished = new AtomicBoolean(false); TransportRemoteSink( TransportService transportService, @@ -311,6 +313,32 @@ static final class TransportRemoteSink implements RemoteSink { @Override public void fetchPageAsync(boolean allSourcesFinished, ActionListener listener) { + if (allSourcesFinished) { + if (finished.compareAndSet(false, true)) { + doFetchPageAsync(true, listener); + } else { + // already finished or promised + listener.onResponse(new ExchangeResponse(blockFactory, null, true)); + } + } else { + // already finished + if (finished.get()) { + listener.onResponse(new ExchangeResponse(blockFactory, null, true)); + return; + } + doFetchPageAsync(false, ActionListener.wrap(r -> { + if (r.finished()) { + finished.set(true); + } + listener.onResponse(r); + }, e -> { + finished.set(true); + listener.onFailure(e); + })); + } + } + + private void doFetchPageAsync(boolean allSourcesFinished, ActionListener listener) { final long reservedBytes = allSourcesFinished ? 0 : estimatedPageSizeInBytes.get(); if (reservedBytes > 0) { // This doesn't fully protect ESQL from OOM, but reduces the likelihood. 
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 8949f61b7420d..4178f02898d79 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -449,6 +449,15 @@ public void testConcurrentWithTransportActions() { ExchangeService exchange1 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange1.registerTransportHandler(node1); AbstractSimpleTransportTestCase.connectToNode(node0, node1.getLocalNode()); + Set finishingRequests = ConcurrentCollections.newConcurrentSet(); + node1.addRequestHandlingBehavior(ExchangeService.EXCHANGE_ACTION_NAME, (handler, request, channel, task) -> { + final ExchangeRequest exchangeRequest = (ExchangeRequest) request; + if (exchangeRequest.sourcesFinished()) { + String exchangeId = exchangeRequest.exchangeId(); + assertTrue("tried to finish [" + exchangeId + "] twice", finishingRequests.add(exchangeId)); + } + handler.messageReceived(request, channel, task); + }); try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; From 656b5f94804a9efe9329041a933e92075400f592 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 27 Nov 2024 14:31:30 -0800 Subject: [PATCH 291/386] Refactor PluginsLoader to better support tests (#117522) This refactors the way PluginsLoader is created to better support various types of testing. --- .../script/ScriptScoreBenchmark.java | 2 +- .../bootstrap/Elasticsearch.java | 2 +- .../elasticsearch/plugins/PluginsLoader.java | 71 ++++++++++++------- .../plugins/PluginsServiceTests.java | 12 ++-- .../plugins/MockPluginsService.java | 13 ++-- .../bench/WatcherScheduleEngineBenchmark.java | 5 +- 6 files changed, 61 insertions(+), 44 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index d44586ef4901a..b44f04c3a26a4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -77,7 +77,7 @@ public class ScriptScoreBenchmark { private final PluginsService pluginsService = new PluginsService( Settings.EMPTY, null, - new PluginsLoader(null, Path.of(System.getProperty("plugins.dir"))) + PluginsLoader.createPluginsLoader(null, Path.of(System.getProperty("plugins.dir"))) ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index b7774259bf289..c06ea9305aef8 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -206,7 +206,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { ); // load the plugin Java modules and layers now for use in entitlements - var pluginsLoader = new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); + var pluginsLoader = 
PluginsLoader.createPluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); bootstrap.setPluginsLoader(pluginsLoader); if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java index 6b3eda6c0c9b4..aa21e5c64d903 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -118,15 +118,30 @@ public static LayerAndLoader ofLoader(ClassLoader loader) { * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem */ - @SuppressWarnings("this-escape") - public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + public static PluginsLoader createPluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + return createPluginsLoader(modulesDirectory, pluginsDirectory, true); + } - Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); - addServerExportsService(qualifiedExports); + /** + * Constructs a new PluginsLoader + * + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem + * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + * @param withServerExports {@code true} to add server module exports + */ + public static PluginsLoader createPluginsLoader(Path modulesDirectory, Path pluginsDirectory, boolean withServerExports) { + Map> qualifiedExports; + if (withServerExports) { + qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); + addServerExportsService(qualifiedExports); + } else { + qualifiedExports = Collections.emptyMap(); + } Set seenBundles = new LinkedHashSet<>(); // load (elasticsearch) module layers + List moduleDescriptors; if (modulesDirectory != null) { try { Set modules = PluginsUtils.getModuleBundles(modulesDirectory); @@ -140,6 +155,7 @@ public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { } // load plugin layers + List pluginDescriptors; if (pluginsDirectory != null) { try { // TODO: remove this leniency, but tests bogusly rely on it @@ -158,7 +174,28 @@ public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { pluginDescriptors = Collections.emptyList(); } - this.loadedPluginLayers = Collections.unmodifiableMap(loadPluginLayers(seenBundles, qualifiedExports)); + Map loadedPluginLayers = new LinkedHashMap<>(); + Map> transitiveUrls = new HashMap<>(); + List sortedBundles = PluginsUtils.sortBundles(seenBundles); + if (sortedBundles.isEmpty() == false) { + Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); + for (PluginBundle bundle : sortedBundles) { + PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); + loadPluginLayer(bundle, loadedPluginLayers, qualifiedExports); + } + } + + return new PluginsLoader(moduleDescriptors, pluginDescriptors, loadedPluginLayers); + } + + PluginsLoader( + List moduleDescriptors, + List pluginDescriptors, + Map loadedPluginLayers + ) { + this.moduleDescriptors = moduleDescriptors; + this.pluginDescriptors = pluginDescriptors; + this.loadedPluginLayers = loadedPluginLayers; } public List moduleDescriptors() { @@ -173,25 +210,7 @@ public Stream pluginLayers() 
{ return loadedPluginLayers.values().stream().map(Function.identity()); } - private Map loadPluginLayers( - Set bundles, - Map> qualifiedExports - ) { - Map loaded = new LinkedHashMap<>(); - Map> transitiveUrls = new HashMap<>(); - List sortedBundles = PluginsUtils.sortBundles(bundles); - if (sortedBundles.isEmpty() == false) { - Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); - for (PluginBundle bundle : sortedBundles) { - PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadPluginLayer(bundle, loaded, qualifiedExports); - } - } - - return loaded; - } - - private void loadPluginLayer( + private static void loadPluginLayer( PluginBundle bundle, Map loaded, Map> qualifiedExports @@ -211,7 +230,7 @@ private void loadPluginLayer( } final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( - getClass().getClassLoader(), + PluginsLoader.class.getClassLoader(), extendedPlugins.stream().map(LoadedPluginLayer::spiClassLoader).toList() ); LayerAndLoader spiLayerAndLoader = null; @@ -427,7 +446,7 @@ private static List parentLayersOrBoot(List parentLaye } } - protected void addServerExportsService(Map> qualifiedExports) { + private static void addServerExportsService(Map> qualifiedExports) { var exportsService = new ModuleQualifiedExportsService(serverModule) { @Override protected void addExports(String pkg, Module target) { diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 015bc72747bf2..79d8f98c7dca6 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.plugin.analysis.CharFilterFactory; import org.elasticsearch.plugins.scanners.PluginInfo; import org.elasticsearch.plugins.spi.BarPlugin; @@ -66,12 +65,11 @@ public class PluginsServiceTests extends ESTestCase { public static class FilterablePlugin extends Plugin implements ScriptPlugin {} static PluginsService newPluginsService(Settings settings) { - return new PluginsService(settings, null, new PluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile()) { - @Override - protected void addServerExportsService(Map> qualifiedExports) { - // tests don't run modular - } - }); + return new PluginsService( + settings, + null, + PluginsLoader.createPluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile(), false) + ); } static PluginsService newMockPluginsService(List> classpathPlugins) { diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index 9e96396493bdf..a9a825af3b865 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.env.Environment; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.plugins.spi.SPIClassIterator; import java.lang.reflect.Constructor; @@ -43,13 +42,11 @@ public 
class MockPluginsService extends PluginsService {
      * @param classpathPlugins Plugins that exist in the classpath which should be loaded
      */
     public MockPluginsService(Settings settings, Environment environment, Collection> classpathPlugins) {
-        super(settings, environment.configFile(), new PluginsLoader(environment.modulesFile(), environment.pluginsFile()) {
-
-            @Override
-            protected void addServerExportsService(Map> qualifiedExports) {
-                // tests don't run modular
-            }
-        });
+        super(
+            settings,
+            environment.configFile(),
+            new PluginsLoader(Collections.emptyList(), Collections.emptyList(), Collections.emptyMap())
+        );
 
         List pluginsLoaded = new ArrayList<>();
 
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java
index 99fb626ad9474..59dc1db88e991 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java
@@ -109,7 +109,10 @@ public static void main(String[] args) throws Exception {
 
             // First clean everything and index the watcher (but not via put alert api!)
             try (
-                Node node = new Node(internalNodeEnv, new PluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())).start()
+                Node node = new Node(
+                    internalNodeEnv,
+                    PluginsLoader.createPluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())
+                ).start()
             ) {
                 final Client client = node.client();
                 ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get();

From 77626d686b62fc85ce91d65cfff8adf631f84bcd Mon Sep 17 00:00:00 2001
From: Nhat Nguyen 
Date: Wed, 27 Nov 2024 16:45:22 -0800
Subject: [PATCH 292/386] Unmute FieldExtractorIT (#117669)

Fixed in #117529

Closes #117524
Closes #117531
---
 muted-tests.yml | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index 8b12bd2dd3365..5cf16fdf3da0a 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -214,14 +214,8 @@ tests:
 - class: org.elasticsearch.xpack.test.rest.XPackRestIT
   method: test {p0=transform/transforms_reset/Test reset running transform}
   issue: https://github.com/elastic/elasticsearch/issues/117473
-- class: org.elasticsearch.xpack.esql.qa.multi_node.FieldExtractorIT
-  method: testConstantKeywordField
-  issue: https://github.com/elastic/elasticsearch/issues/117524
 - class: org.elasticsearch.repositories.s3.RepositoryS3EcsClientYamlTestSuiteIT
   issue: https://github.com/elastic/elasticsearch/issues/117525
-- class: org.elasticsearch.xpack.esql.qa.mixed.FieldExtractorIT
-  method: testConstantKeywordField
-  issue: https://github.com/elastic/elasticsearch/issues/117531
 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
   method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
   issue: https://github.com/elastic/elasticsearch/issues/116777

From bb93f1f3ce8f1460e48a4b86d3b0fee72b4fa4b1 Mon Sep 17 00:00:00 2001
From: Michael Peterson 
Date: Wed, 27 Nov 2024 21:14:19 -0500
Subject: [PATCH 293/386] Adjusted testChunkResponseSizeColumnar to always expect the overall took time in the async response (#117673)

---
 .../xpack/esql/action/EsqlQueryResponseTests.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index f7b402b909732..35364089127cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -519,15 +519,14 @@ static EsqlQueryResponse fromXContent(XContentParser parser) { } public void testChunkResponseSizeColumnar() { + int sizeClusterDetails = 14; try (EsqlQueryResponse resp = randomResponse(true, null)) { - int sizeClusterDetails = 14; int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize); } try (EsqlQueryResponse resp = randomResponseAsync(true, null, true)) { - int sizeClusterDetails = resp.isRunning() ? 13 : 14; // overall took time not present when is_running=true int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 7 + sizeClusterDetails + bodySize); // is_running From c3ac2bd58a5c406982212def72580cc25e89761a Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:23:28 +0100 Subject: [PATCH 294/386] [DOCS] Add Elastic Rerank usage docs (#117625) --- .../inference/service-elasticsearch.asciidoc | 41 +++++++-- .../reranking/semantic-reranking.asciidoc | 20 +++-- docs/reference/search/retriever.asciidoc | 83 +++++++++++++++++-- 3 files changed, 121 insertions(+), 23 deletions(-) diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 0103b425faefe..cd06e6d7b2f64 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -69,15 +69,15 @@ include::inference-shared.asciidoc[tag=service-settings] These settings are specific to the `elasticsearch` service. -- -`adaptive_allocations`::: -(Optional, object) -include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation] - `deployment_id`::: (Optional, string) The `deployment_id` of an existing trained model deployment. When `deployment_id` is used the `model_id` is optional. +`adaptive_allocations`::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation] + `enabled`:::: (Optional, Boolean) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=adaptive-allocation-enabled] @@ -119,7 +119,6 @@ include::inference-shared.asciidoc[tag=task-settings] Returns the document instead of only the index. Defaults to `true`. ===== - [discrete] [[inference-example-elasticsearch-elser]] ==== ELSER via the `elasticsearch` service @@ -137,7 +136,7 @@ PUT _inference/sparse_embedding/my-elser-model "adaptive_allocations": { <1> "enabled": true, "min_number_of_allocations": 1, - "max_number_of_allocations": 10 + "max_number_of_allocations": 4 }, "num_threads": 1, "model_id": ".elser_model_2" <2> @@ -150,6 +149,34 @@ PUT _inference/sparse_embedding/my-elser-model Valid values are `.elser_model_2` and `.elser_model_2_linux-x86_64`. For further details, refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation]. 
+[discrete]
+[[inference-example-elastic-reranker]]
+==== Elastic Rerank via the `elasticsearch` service
+
+The following example shows how to create an {infer} endpoint called `my-elastic-rerank` to perform a `rerank` task type using the built-in Elastic Rerank cross-encoder model.
+
+The API request below will automatically download the Elastic Rerank model if it isn't already downloaded and then deploy the model.
+Once deployed, the model can be used for semantic re-ranking with a <>.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/rerank/my-elastic-rerank
+{
+  "service": "elasticsearch",
+  "service_settings": {
+    "model_id": ".rerank-v1", <1>
+    "num_threads": 1,
+    "adaptive_allocations": { <2>
+      "enabled": true,
+      "min_number_of_allocations": 1,
+      "max_number_of_allocations": 4
+    }
+  }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> The `model_id` must be the ID of the built-in Elastic Rerank model: `.rerank-v1`.
+<2> {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[Adaptive allocations] will be enabled with the minimum of 1 and the maximum of 4 allocations.

 [discrete]
 [[inference-example-elasticsearch]]
@@ -186,7 +213,7 @@ If using the Python client, you can set the `timeout` parameter to a higher valu

 [discrete]
 [[inference-example-eland]]
-==== Models uploaded by Eland via the elasticsearch service
+==== Models uploaded by Eland via the `elasticsearch` service

 The following example shows how to create an {infer} endpoint called `my-msmarco-minilm-model` to perform a `text_embedding` task type.

diff --git a/docs/reference/reranking/semantic-reranking.asciidoc b/docs/reference/reranking/semantic-reranking.asciidoc
index 4ebe90e44708e..e1e2abd224a8e 100644
--- a/docs/reference/reranking/semantic-reranking.asciidoc
+++ b/docs/reference/reranking/semantic-reranking.asciidoc
@@ -85,14 +85,16 @@ In {es}, semantic re-rankers are implemented using the {es} <>
 To use semantic re-ranking in {es}, you need to:
 
-. *Choose a re-ranking model*.
-** Integrate directly with the <> using the `rerank` task type
-** Integrate directly with the <> using the `rerank` task type
-** Upload a model to {es} from Hugging Face with {eland-docs}/machine-learning.html#ml-nlp-pytorch[Eland]. You'll need to use the `text_similarity` NLP task type when loading the model using Eland. Refer to {ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-similarity[the Elastic NLP model reference] for a list of third party text similarity models supported by {es} for semantic re-ranking.
-*** Then set up an <> with the `rerank` task type
-. *Create a `rerank` task using the <>*.
+. *Select and configure a re-ranking model*.
+You have the following options:
+.. Use the <> cross-encoder model via the inference API's {es} service.
+.. Use the <> to create a `rerank` endpoint.
+.. Use the <> to create a `rerank` endpoint.
+.. Upload a model to {es} from Hugging Face with {eland-docs}/machine-learning.html#ml-nlp-pytorch[Eland]. You'll need to use the `text_similarity` NLP task type when loading the model using Eland. Then set up an <> with the `rerank` endpoint type.
++
+Refer to {ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-similarity[the Elastic NLP model reference] for a list of third party text similarity models supported by {es} for semantic re-ranking.
+
+. *Create a `rerank` endpoint using the <>*.
 The Inference API creates an inference endpoint and configures your chosen machine learning model to perform the re-ranking task.
 . *Define a `text_similarity_reranker` retriever in your search request*.
The retriever syntax makes it simple to configure both the retrieval and re-ranking of search results in a single API call.
@@ -117,7 +119,7 @@ POST _search
       }
     },
     "field": "text",
-    "inference_id": "my-cohere-rerank-model",
+    "inference_id": "elastic-rerank",
     "inference_text": "How often does the moon hide the sun?",
     "rank_window_size": 100,
     "min_score": 0.5
diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc
index 86a81f1d155d2..b90b7e312c790 100644
--- a/docs/reference/search/retriever.asciidoc
+++ b/docs/reference/search/retriever.asciidoc
@@ -11,6 +11,7 @@ This allows for complex behavior to be depicted in a tree-like structure, called
 [TIP]
 ====
 Refer to <> for a high level overview of the retrievers abstraction.
+Refer to <> for additional examples.
 ====
 
 The following retrievers are available:
@@ -382,16 +383,17 @@ Refer to <> for a high level overview of semantic re-ranking
 
 ===== Prerequisites
 
-To use `text_similarity_reranker` you must first set up a `rerank` task using the <>.
-The `rerank` task should be set up with a machine learning model that can compute text similarity.
+To use `text_similarity_reranker` you must first set up an inference endpoint for the `rerank` task using the <>.
+The endpoint should be set up with a machine learning model that can compute text similarity.
 Refer to {ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-similarity[the Elastic NLP model reference] for a list of third-party text similarity models supported by {es}.
 
-Currently you can:
+You have the following options:
 
-* Integrate directly with the <> using the `rerank` task type
-* Integrate directly with the <> using the `rerank` task type
+* Use the built-in <> cross-encoder model via the inference API's {es} service.
+* Use the <> with the `rerank` task type.
+* Use the <> with the `rerank` task type.
 * Upload a model to {es} with {eland-docs}/machine-learning.html#ml-nlp-pytorch[Eland] using the `text_similarity` NLP task type.
-** Then set up an <> with the `rerank` task type
+** Then set up an <> with the `rerank` task type.
 ** Refer to the <> on this page for a step-by-step guide.
 
 ===== Parameters
@@ -436,13 +438,70 @@ Note that score calculations vary depending on the model used.
 Applies the specified <> to the child <>.
 If the child retriever already specifies any filters, then this top-level filter is applied in conjunction with the filter defined in the child retriever.
 
+[discrete]
+[[text-similarity-reranker-retriever-example-elastic-rerank]]
+==== Example: Elastic Rerank
+
+This example demonstrates how to deploy the Elastic Rerank model and use it to re-rank search results using the `text_similarity_reranker` retriever.
+
+Follow these steps:
+
+. Create an inference endpoint for the `rerank` task using the <>.
++
+[source,console]
+----
+PUT _inference/rerank/my-elastic-rerank
+{
+  "service": "elasticsearch",
+  "service_settings": {
+    "model_id": ".rerank-v1",
+    "num_threads": 1,
+    "adaptive_allocations": { <1>
+      "enabled": true,
+      "min_number_of_allocations": 1,
+      "max_number_of_allocations": 10
+    }
+  }
+}
+----
+// TEST[skip:uses ML]
+<1> {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[Adaptive allocations] will be enabled with the minimum of 1 and the maximum of 10 allocations.
++
+. 
Define a `text_similarity_rerank` retriever: ++ +[source,console] +---- +POST _search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "standard": { + "query": { + "match": { + "text": "How often does the moon hide the sun?" + } + } + } + }, + "field": "text", + "inference_id": "my-elastic-rerank", + "inference_text": "How often does the moon hide the sun?", + "rank_window_size": 100, + "min_score": 0.5 + } + } +} +---- +// TEST[skip:uses ML] + [discrete] [[text-similarity-reranker-retriever-example-cohere]] ==== Example: Cohere Rerank This example enables out-of-the-box semantic search by re-ranking top documents using the Cohere Rerank API. This approach eliminates the need to generate and store embeddings for all indexed documents. -This requires a <> using the `rerank` task type. +This requires a <> that is set up for the `rerank` task type. [source,console] ---- @@ -680,6 +739,12 @@ GET movies/_search <1> The `rule` retriever is the outermost retriever, applying rules to the search results that were previously reranked using the `rrf` retriever. <2> The `rrf` retriever returns results from all of its sub-retrievers, and the output of the `rrf` retriever is used as input to the `rule` retriever. +[discrete] +[[retriever-common-parameters]] +=== Common usage guidelines + +[discrete] +[[retriever-size-pagination]] ==== Using `from` and `size` with a retriever tree The <> and <> @@ -688,12 +753,16 @@ parameters are provided globally as part of the general They are applied to all retrievers in a retriever tree, unless a specific retriever overrides the `size` parameter using a different parameter such as `rank_window_size`. Though, the final search hits are always limited to `size`. +[discrete] +[[retriever-aggregations]] ==== Using aggregations with a retriever tree <> are globally specified as part of a search request. The query used for an aggregation is the combination of all leaf retrievers as `should` clauses in a <>. +[discrete] +[[retriever-restrictions]] ==== Restrictions on search parameters when specifying a retriever When a retriever is specified as part of a search, the following elements are not allowed at the top-level. From 79d70686b3ba86dcab4694d46e5a81de74ba06f8 Mon Sep 17 00:00:00 2001 From: kosabogi <105062005+kosabogi@users.noreply.github.com> Date: Thu, 28 Nov 2024 09:26:16 +0100 Subject: [PATCH 295/386] Fixes typo (#117684) --- .../ml/trained-models/apis/get-trained-models-stats.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc index beff87e6ec6e6..b55f022a5d168 100644 --- a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc +++ b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc @@ -235,7 +235,7 @@ The reason for the current state. Usually only populated when the `routing_state (string) The current routing state. -- -* `starting`: The model is attempting to allocate on this model, inference calls are not yet accepted. +* `starting`: The model is attempting to allocate on this node, inference calls are not yet accepted. * `started`: The model is allocated and ready to accept inference requests. * `stopping`: The model is being deallocated from this node. * `stopped`: The model is fully deallocated from this node. 
From dc7ea9eff9a5897fabc2fb9dd3bb291eee77ca11 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 28 Nov 2024 09:40:38 +0100 Subject: [PATCH 296/386] ESQL: Fix LookupJoin output (#117639) * Fix output methods related to LookupJoin * Add tests with subsequent EVAL * Fix BinaryPlan.computeReferences This must not just use the references from its own output. Not only is this wrong, it also leads to failures when we call the .references() method on unresolved plans. --- .../xpack/esql/ccq/MultiClusterSpecIT.java | 4 +- .../src/main/resources/lookup-join.csv-spec | 67 +++++++++++++++---- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../xpack/esql/analysis/Analyzer.java | 15 ++--- .../xpack/esql/plan/QueryPlan.java | 5 ++ .../xpack/esql/plan/logical/BinaryPlan.java | 7 -- .../xpack/esql/plan/logical/join/Join.java | 48 ++++--------- .../esql/plan/logical/join/LookupJoin.java | 43 +++--------- .../xpack/esql/session/EsqlSession.java | 4 -- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 5 +- 11 files changed, 91 insertions(+), 111 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 5df85d1004dd1..8f4522573f880 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -47,7 +47,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V2; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -125,7 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V2.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 605bf78c20a32..11786fb905c60 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -3,22 
+3,22 @@ // Reuses the sample dataset and commands from enrich.csv-spec // -basicOnTheDataNode -required_capability: join_lookup +//TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) +basicOnTheDataNode-Ignore +required_capability: join_lookup_v2 -//TODO: this returns different results in CI then locally -// sometimes null, sometimes spanish (likely related to the execution order) FROM employees | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code -| WHERE emp_no < 500 -| KEEP emp_no, language_name +| WHERE emp_no >= 10091 AND emp_no < 10094 | SORT emp_no -| LIMIT 1 +| KEEP emp_no, language_code, language_name ; -emp_no:integer | language_name:keyword -//10091 | Spanish +emp_no:integer | language_code:integer | language_name:keyword +10091 | 3 | Spanish +10092 | 1 | English +10093 | 3 | Spanish ; basicRow-Ignore @@ -33,16 +33,55 @@ language_code:keyword | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup +required_capability: join_lookup_v2 + +FROM employees +| SORT emp_no +| LIMIT 3 +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| KEEP emp_no, language_code, language_name +; + +emp_no:integer | language_code:integer | language_name:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + +//TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) +subsequentEvalOnTheDataNode-Ignore +required_capability: join_lookup_v2 + +FROM employees +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no >= 10091 AND emp_no < 10094 +| SORT emp_no +| KEEP emp_no, language_code, language_name +| EVAL language_name = TO_LOWER(language_name), language_code_x2 = 2*language_code +; + +emp_no:integer | language_code:integer | language_name:keyword | language_code_x2:integer +10091 | 3 | spanish | 6 +10092 | 1 | english | 2 +10093 | 3 | spanish | 6 +; + +subsequentEvalOnTheCoordinator +required_capability: join_lookup_v2 FROM employees | SORT emp_no -| LIMIT 1 +| LIMIT 3 | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code -| KEEP emp_no, language_name +| KEEP emp_no, language_code, language_name +| EVAL language_name = TO_LOWER(language_name), language_code_x2 = 2*language_code ; -emp_no:integer | language_name:keyword -10001 | French +emp_no:integer | language_code:integer | language_name:keyword | language_code_x2:integer +10001 | 2 | french | 4 +10002 | 5 | null | 10 +10003 | 4 | german | 8 ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 58748781d1778..d8004f73f613f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -524,7 +524,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP(Build.current().isSnapshot()), + JOIN_LOOKUP_V2(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index dde7bc09ac615..b847508d2b161 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -609,8 +608,7 @@ private Join resolveLookupJoin(LookupJoin join) { JoinConfig config = join.config(); // for now, support only (LEFT) USING clauses JoinType type = config.type(); - // rewrite the join into a equi-join between the field with the same name between left and right - // per SQL standard, the USING columns are placed first in the output, followed by the rest of left, then right + // rewrite the join into an equi-join between the field with the same name between left and right if (type instanceof UsingJoinType using) { List cols = using.columns(); // the lookup cannot be resolved, bail out @@ -632,14 +630,9 @@ private Join resolveLookupJoin(LookupJoin join) { // resolve the using columns against the left and the right side then assemble the new join config List leftKeys = resolveUsingColumns(cols, join.left().output(), "left"); List rightKeys = resolveUsingColumns(cols, join.right().output(), "right"); - List output = new ArrayList<>(join.left().output()); - // the order is stable (since the AttributeSet preservers the insertion order) - output.addAll(join.right().outputSet().subtract(new AttributeSet(rightKeys))); - - // update the config - pick the left keys as those in the output - type = new UsingJoinType(coreJoin, rightKeys); - config = new JoinConfig(type, leftKeys, leftKeys, rightKeys); - join = new LookupJoin(join.source(), join.left(), join.right(), config, output); + + config = new JoinConfig(coreJoin, leftKeys, leftKeys, rightKeys); + join = new LookupJoin(join.source(), join.left(), join.right(), config); } // everything else is unsupported for now else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java index ef8c3983faf2e..02373cc62e81f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java @@ -33,6 +33,10 @@ public QueryPlan(Source source, List children) { super(source, children); } + /** + * The ordered list of attributes (i.e. columns) this plan produces when executed. + * Must be called only on resolved plans, otherwise may throw an exception or return wrong results. + */ public abstract List output(); public AttributeSet outputSet() { @@ -87,6 +91,7 @@ public AttributeSet references() { /** * This very likely needs to be overridden for {@link QueryPlan#references} to be correct when inheriting. + * This can be called on unresolved plans and therefore must not rely on calls to {@link QueryPlan#output()}. 
  */
     protected AttributeSet computeReferences() {
         return Expressions.references(expressions());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java
index e65cdda4b6069..91cd7f7a15840 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java
@@ -6,8 +6,6 @@
  */
 package org.elasticsearch.xpack.esql.plan.logical;
 
-import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
-import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 
 import java.util.Arrays;
@@ -45,11 +43,6 @@ public final BinaryPlan replaceRight(LogicalPlan newRight) {
         return replaceChildren(left, newRight);
     }
 
-    protected AttributeSet computeReferences() {
-        // TODO: this needs to be driven by the join config
-        return Expressions.references(output());
-    }
-
     public abstract BinaryPlan replaceChildren(LogicalPlan left, LogicalPlan right);
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
index 0e182646d914a..dd6b3ea3455f7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
@@ -10,9 +10,8 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.util.Maps;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
-import org.elasticsearch.xpack.esql.core.expression.Nullability;
+import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -23,9 +22,11 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
 
+import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes;
 import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT;
 import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.RIGHT;
 
@@ -107,37 +108,24 @@ public static List computeOutput(List leftOutput,
         List output;
         // TODO: make the other side nullable
+        Set matchFieldNames = config.matchFields().stream().map(NamedExpression::name).collect(Collectors.toSet());
         if (LEFT.equals(joinType)) {
-            // right side becomes nullable and overrides left
-            // output = merge(leftOutput, makeNullable(rightOutput));
-            output = merge(leftOutput, rightOutput);
+            // right side becomes nullable and overrides left except for match fields, which we preserve from the left
+            List rightOutputWithoutMatchFields = rightOutput.stream()
+                .filter(attr -> matchFieldNames.contains(attr.name()) == false)
+                .toList();
+            output = mergeOutputAttributes(rightOutputWithoutMatchFields, leftOutput);
         } else if (RIGHT.equals(joinType)) {
-            // left side becomes nullable and overrides right
-            // output = merge(makeNullable(leftOutput), rightOutput);
-            output = merge(leftOutput, rightOutput);
+            List leftOutputWithoutMatchFields = leftOutput.stream()
+                .filter(attr -> matchFieldNames.contains(attr.name()) == false)
+                .toList();
+            output = mergeOutputAttributes(leftOutputWithoutMatchFields, rightOutput);
         } else {
             throw new IllegalArgumentException(joinType.joinName() + " unsupported");
         }
         return output;
     }
 
-    /**
-     * Merge the two lists of attributes into one and preserves order.
-     */
-    private static List merge(List left, List right) {
-        // use linked hash map to preserve order
-        Map nameToAttribute = Maps.newLinkedHashMapWithExpectedSize(left.size() + right.size());
-        for (Attribute a : left) {
-            nameToAttribute.put(a.name(), a);
-        }
-        for (Attribute a : right) {
-            // override the existing entry in place
-            nameToAttribute.compute(a.name(), (name, existing) -> a);
-        }
-
-        return new ArrayList<>(nameToAttribute.values());
-    }
-
     /**
      * Make fields references, so we don't check if they exist in the index.
      * We do this for fields that we know don't come from the index.
@@ -161,14 +149,6 @@ public static List makeReference(List output) {
         return out;
     }
 
-    private static List makeNullable(List output) {
-        List out = new ArrayList<>(output.size());
-        for (Attribute a : output) {
-            out.add(a.withNullability(Nullability.TRUE));
-        }
-        return out;
-    }
-
     @Override
     public boolean expressionsResolved() {
         return config.expressionsResolved();
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java
index 2ee9213f45b36..57c8cb00baa32 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType;
 
 import java.util.List;
-import java.util.Objects;
 
 import static java.util.Collections.emptyList;
 import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT;
@@ -26,10 +25,8 @@
  */
 public class LookupJoin extends Join implements SurrogateLogicalPlan {
 
-    private final List output;
-
     public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) {
-        this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList(), emptyList());
+        this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList());
     }
 
     public LookupJoin(
@@ -39,15 +36,13 @@ public LookupJoin(
         JoinType type,
         List joinFields,
         List leftFields,
-        List rightFields,
-        List output
+        List rightFields
     ) {
-        this(source, left, right, new JoinConfig(type, joinFields, leftFields, rightFields), output);
+        this(source, left, right, new JoinConfig(type, joinFields, leftFields, rightFields));
    }
 
-    public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, JoinConfig joinConfig, List output) {
+    public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, JoinConfig joinConfig) {
         super(source, left, right, joinConfig);
-        this.output = output;
     }
 
     /**
      */
     @Override
     public LogicalPlan surrogate() {
-        JoinConfig cfg = config();
-        JoinConfig newConfig = new JoinConfig(LEFT, cfg.matchFields(), cfg.leftFields(), cfg.rightFields());
-        Join normalized = new Join(source(), left(), right(), newConfig);
+        Join normalized = new Join(source(), left(), right(), config());
         // TODO: decide whether to introduce USING or just basic ON semantics - keep the ordering out for now
-        return new Project(source(), normalized, output);
-    }
-
-    public List output() {
-        return output;
+        return new Project(source(), normalized, output());
     }
 
     @Override
     public Join replaceChildren(LogicalPlan left, LogicalPlan right) {
-        return new LookupJoin(source(), left, right, config(), output);
+        return new LookupJoin(source(), left, right, config());
     }
 
     @Override
     protected NodeInfo info() {
             config().type(),
             config().matchFields(),
             config().leftFields(),
-            config().rightFields(),
-            output
+            config().rightFields()
         );
     }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(super.hashCode(), output);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (super.equals(obj) == false) {
-            return false;
-        }
-
-        LookupJoin other = (LookupJoin) obj;
-        return Objects.equals(output, other.output);
-    }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 021596c31f65d..3b0f9ab578df9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -79,7 +79,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
@@ -466,8 +465,6 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF
         // ie "from test | eval lang = languages + 1 | keep *l" should consider both "languages" and "*l" as valid fields to ask for
         AttributeSet keepCommandReferences = new AttributeSet();
         AttributeSet keepJoinReferences = new AttributeSet();
-        List> keepMatches = new ArrayList<>();
-        List keepPatterns = new ArrayList<>();
 
         parsed.forEachDown(p -> {// go over each plan top-down
             if (p instanceof RegexExtract re) { // for Grok and Dissect
@@ -501,7 +498,6 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF
                 references.add(ua);
                 if (p instanceof Keep) {
                     keepCommandReferences.add(ua);
-                    keepMatches.add(up::match);
                 }
             });
             if (p instanceof Keep) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index c745801bf505f..6763988eac638 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -263,7 +263,7 @@ public final void test() throws Throwable {
         );
         assumeFalse(
             "lookup join disabled for csv tests",
-            testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP.capabilityName())
+            testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V2.capabilityName())
         );
         if (Build.current().isSnapshot()) {
             assertThat(
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index 2770ed1f336ae..e0ebc92afa95d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -1945,9 +1945,10 @@ public void testLookup() {
             .item(startsWith("job{f}"))
             .item(startsWith("job.raw{f}"))
             /*
-             * Int key is returned as a full field (despite the rename)
+             * Int is a reference here because we renamed it in project.
+             * If we hadn't it'd be a field and that'd be fine.
              */
-            .item(containsString("int{f}"))
+            .item(containsString("int{r}"))
             .item(startsWith("last_name{f}"))
             .item(startsWith("long_noidx{f}"))
             .item(startsWith("salary{f}"))
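The patch above makes the lookup join's output merge the right (lookup) side's attributes over the left side's while pinning the join match fields to the left. As a rough, self-contained sketch of that ordering and shadowing behavior — plain String names stand in for Attribute here, and computeLeftJoinOutput is a hypothetical helper for illustration, not the real mergeOutputAttributes:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

class LookupJoinOutputSketch {
    // Left attributes keep their position; right attributes override same-named ones,
    // except the match fields, which stay bound to the left side.
    static List<String> computeLeftJoinOutput(List<String> left, List<String> right, Set<String> matchFields) {
        Map<String, String> byName = new LinkedHashMap<>();
        for (String attr : left) {
            byName.put(attr, attr);
        }
        for (String attr : right) {
            if (matchFields.contains(attr) == false) {
                byName.put(attr, attr); // overrides in place, preserving insertion order
            }
        }
        return new ArrayList<>(byName.values());
    }

    public static void main(String[] args) {
        // "language_code" is the match field, so it must resolve against the left side.
        System.out.println(computeLeftJoinOutput(
            List.of("emp_no", "language_code"),
            List.of("language_code", "language_name"),
            Set.of("language_code")
        ));
        // prints: [emp_no, language_code, language_name]
    }
}

The real implementation operates on resolved Attribute instances; the order-preserving override is why a lookup field only shadows a left-hand field when it is not one of the match keys.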
From 11ffe8831793a5cad91b5bb5fb63e2365286451a Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Thu, 28 Nov 2024 09:54:42 +0100
Subject: [PATCH 297/386] Speedup HealthNodeTaskExecutor CS listener (#113436)

This method was quite slow in tests because there's an expensive
assertion in `ClusterApplierService.state()` that we run when calling
`ClusterService.localNode()`.
---
 .../selection/HealthNodeTaskExecutor.java | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
index 3efad1aee26b0..5991bc248ba76 100644
--- a/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
+++ b/server/src/main/java/org/elasticsearch/health/node/selection/HealthNodeTaskExecutor.java
@@ -182,8 +182,8 @@ void startTask(ClusterChangedEvent event) {
 
     // visible for testing
     void shuttingDown(ClusterChangedEvent event) {
-        DiscoveryNode node = clusterService.localNode();
-        if (isNodeShuttingDown(event, node.getId())) {
+        if (isNodeShuttingDown(event)) {
+            var node = event.state().getNodes().getLocalNode();
             abortTaskIfApplicable("node [{" + node.getName() + "}{" + node.getId() + "}] shutting down");
         }
     }
@@ -198,9 +198,18 @@ void abortTaskIfApplicable(String reason) {
         }
     }
 
-    private static boolean isNodeShuttingDown(ClusterChangedEvent event, String nodeId) {
-        return event.previousState().metadata().nodeShutdowns().contains(nodeId) == false
-            && event.state().metadata().nodeShutdowns().contains(nodeId);
+    private static boolean isNodeShuttingDown(ClusterChangedEvent event) {
+        if (event.metadataChanged() == false) {
+            return false;
+        }
+        var shutdownsOld = event.previousState().metadata().nodeShutdowns();
+        var shutdownsNew = event.state().metadata().nodeShutdowns();
+        if (shutdownsNew == shutdownsOld) {
+            return false;
+        }
+        String nodeId = event.state().nodes().getLocalNodeId();
+        return shutdownsOld.contains(nodeId) == false && shutdownsNew.contains(nodeId);
+    }
 
     public static List getNamedXContentParsers() {

From d4bcd979a5b9196f23b00d97cb17aad1679818c8 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Thu, 28 Nov 2024 10:05:26 +0100
Subject: [PATCH 298/386] Update synthetic source legacy license cutoff date.
 (#117658)

Update the default cutoff date from 2024-12-12T00:00 UTC to 2025-02-01T00:00 UTC.
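The cutoff is held as UTC epoch milliseconds; a minimal standalone sketch, using the same java.time calls as the diff below, shows what the old and new defaults evaluate to:

import java.time.LocalDateTime;
import java.time.ZoneOffset;

class CutoffDateSketch {
    public static void main(String[] args) {
        // Old default: 2024-12-12T00:00 UTC
        long oldCutoff = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
        // New default: 2025-02-01T00:00 UTC
        long newCutoff = LocalDateTime.of(2025, 2, 1, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
        System.out.println(oldCutoff); // 1733961600000
        System.out.println(newCutoff); // 1738368000000
    }
}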
---
 .../xpack/logsdb/SyntheticSourceLicenseService.java | 2 +-
 .../SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
index 71de2f7909835..26a672fb1c903 100644
--- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
+++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceLicenseService.java
@@ -29,7 +29,7 @@ final class SyntheticSourceLicenseService {
     // You can only override this property if you received explicit approval from Elastic.
     static final String CUTOFF_DATE_SYS_PROP_NAME = "es.mapping.synthetic_source_fallback_to_stored_source.cutoff_date_restricted_override";
     private static final Logger LOGGER = LogManager.getLogger(SyntheticSourceLicenseService.class);
-    static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2024, 12, 12, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+    static final long DEFAULT_CUTOFF_DATE = LocalDateTime.of(2025, 2, 1, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
 
     /**
      * A setting that determines whether source mode should always be stored source. Regardless of licence.
diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java
index 939d7d892a48d..eda0d87868745 100644
--- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java
+++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderLegacyLicenseTests.java
@@ -98,7 +98,7 @@ public void testGetAdditionalIndexSettingsTsdb() throws IOException {
     }
 
     public void testGetAdditionalIndexSettingsTsdbAfterCutoffDate() throws Exception {
-        long start = LocalDateTime.of(2024, 12, 20, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
+        long start = LocalDateTime.of(2025, 2, 2, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
         License license = createGoldOrPlatinumLicense(start);
         long time = LocalDateTime.of(2024, 12, 31, 0, 0).toInstant(ZoneOffset.UTC).toEpochMilli();
         var licenseState = new XPackLicenseState(() -> time, new XPackLicenseStatus(license.operationMode(), true, null));

From 5d686973084e926a2dbec96a311a6684807f5406 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Thu, 28 Nov 2024 09:36:59 +0000
Subject: [PATCH 299/386] [ML] Delete accidental changelog for a non issue (#117636)

---
 docs/changelog/117235.yaml | 5 -----
 1 file changed, 5 deletions(-)
 delete mode 100644 docs/changelog/117235.yaml

diff --git a/docs/changelog/117235.yaml b/docs/changelog/117235.yaml
deleted file mode 100644
index dbf0b4cc18388..0000000000000
--- a/docs/changelog/117235.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 117235
-summary: "Deprecate `ChunkingOptions` parameter"
-area: ES|QL
-type: enhancement
-issues: []

From 6a4b68d263fe3533fc44e90d779537b48ffaf5f6 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Thu, 28 Nov 2024 10:53:39 +0100
Subject: [PATCH 300/386] Add source mode stats to MappingStats (#117463)

---
 docs/reference/cluster/stats.asciidoc | 5 +-
 .../test/cluster.stats/40_source_modes.yml | 50 ++++++++++
server/src/main/java/module-info.java | 3 +- .../org/elasticsearch/TransportVersions.java | 3 + .../cluster/stats/ClusterStatsFeatures.java | 26 ++++++ .../admin/cluster/stats/MappingStats.java | 55 ++++++++++- ...lasticsearch.features.FeatureSpecification | 1 + .../cluster/stats/MappingStatsTests.java | 92 ++++++++++++++++++- .../ClusterStatsMonitoringDocTests.java | 3 +- 9 files changed, 226 insertions(+), 12 deletions(-) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/40_source_modes.yml create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsFeatures.java diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index bd818a538f78b..d875417bde51a 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1644,7 +1644,10 @@ The API returns the following response: "total_deduplicated_mapping_size": "0b", "total_deduplicated_mapping_size_in_bytes": 0, "field_types": [], - "runtime_field_types": [] + "runtime_field_types": [], + "source_modes" : { + "stored": 0 + } }, "analysis": { "char_filter_types": [], diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/40_source_modes.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/40_source_modes.yml new file mode 100644 index 0000000000000..64bbad7fb1c6d --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/40_source_modes.yml @@ -0,0 +1,50 @@ +--- +test source modes: + - requires: + cluster_features: ["cluster.stats.source_modes"] + reason: requires source modes features + + - do: + indices.create: + index: test-synthetic + body: + settings: + index: + mapping: + source.mode: synthetic + + - do: + indices.create: + index: test-stored + + - do: + indices.create: + index: test-disabled + body: + settings: + index: + mapping: + source.mode: disabled + + - do: + bulk: + refresh: true + body: + - '{ "create": { "_index": "test-synthetic" } }' + - '{ "name": "aaaa", "some_string": "AaAa", "some_int": 1000, "some_double": 123.456789, "some_bool": true }' + - '{ "create": { "_index": "test-stored" } }' + - '{ "name": "bbbb", "some_string": "BbBb", "some_int": 2000, "some_double": 321.987654, "some_bool": false }' + - '{ "create": { "_index": "test-disabled" } }' + - '{ "name": "cccc", "some_string": "CcCc", "some_int": 3000, "some_double": 421.484654, "some_bool": false }' + + - do: + search: + index: test-* + - match: { hits.total.value: 3 } + + - do: + cluster.stats: { } + + - match: { indices.mappings.source_modes.disabled: 1 } + - match: { indices.mappings.source_modes.stored: 1 } + - match: { indices.mappings.source_modes.synthetic: 1 } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 35d1a44624b0f..63dbac3a72487 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -433,7 +433,8 @@ org.elasticsearch.search.SearchFeatures, org.elasticsearch.script.ScriptFeatures, org.elasticsearch.search.retriever.RetrieversFeatures, - org.elasticsearch.reservedstate.service.FileSettingsFeatures; + org.elasticsearch.reservedstate.service.FileSettingsFeatures, + org.elasticsearch.action.admin.cluster.stats.ClusterStatsFeatures; uses org.elasticsearch.plugins.internal.SettingsExtension; uses RestExtension; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java 
b/server/src/main/java/org/elasticsearch/TransportVersions.java index dda7d7e5d4c4c..a1315ccf66701 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -205,10 +205,13 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_00_1); + public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_00_2); public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0); public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0); + public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsFeatures.java new file mode 100644 index 0000000000000..6e85093a52cdd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsFeatures.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.admin.cluster.stats; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +/** + * Spec for cluster stats features. 
+ */ +public class ClusterStatsFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of(MappingStats.SOURCE_MODES_FEATURE); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java index d2e5973169919..1bc2e1d13c864 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; @@ -19,6 +20,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -31,6 +34,7 @@ import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.OptionalLong; @@ -44,6 +48,8 @@ */ public final class MappingStats implements ToXContentFragment, Writeable { + static final NodeFeature SOURCE_MODES_FEATURE = new NodeFeature("cluster.stats.source_modes"); + private static final Pattern DOC_PATTERN = Pattern.compile("doc[\\[.]"); private static final Pattern SOURCE_PATTERN = Pattern.compile("params\\._source"); @@ -53,6 +59,8 @@ public final class MappingStats implements ToXContentFragment, Writeable { public static MappingStats of(Metadata metadata, Runnable ensureNotCancelled) { Map fieldTypes = new HashMap<>(); Set concreteFieldNames = new HashSet<>(); + // Account different source modes based on index.mapping.source.mode setting: + Map sourceModeUsageCount = new HashMap<>(); Map runtimeFieldTypes = new HashMap<>(); final Map mappingCounts = new IdentityHashMap<>(metadata.getMappingsByHash().size()); for (IndexMetadata indexMetadata : metadata) { @@ -62,6 +70,9 @@ public static MappingStats of(Metadata metadata, Runnable ensureNotCancelled) { continue; } AnalysisStats.countMapping(mappingCounts, indexMetadata); + + var sourceMode = SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.get(indexMetadata.getSettings()); + sourceModeUsageCount.merge(sourceMode.toString().toLowerCase(Locale.ENGLISH), 1, Integer::sum); } final AtomicLong totalFieldCount = new AtomicLong(); final AtomicLong totalDeduplicatedFieldCount = new AtomicLong(); @@ -175,12 +186,14 @@ public static MappingStats of(Metadata metadata, Runnable ensureNotCancelled) { for (MappingMetadata mappingMetadata : metadata.getMappingsByHash().values()) { totalMappingSizeBytes += mappingMetadata.source().compressed().length; } + return new MappingStats( totalFieldCount.get(), totalDeduplicatedFieldCount.get(), totalMappingSizeBytes, fieldTypes.values(), - runtimeFieldTypes.values() + runtimeFieldTypes.values(), + sourceModeUsageCount ); } @@ -215,17 +228,20 @@ private static int countOccurrences(String script, Pattern pattern) { private final List fieldTypeStats; private final List runtimeFieldStats; + private final Map sourceModeUsageCount; MappingStats( long totalFieldCount, long 
totalDeduplicatedFieldCount, long totalMappingSizeBytes, Collection fieldTypeStats, - Collection runtimeFieldStats + Collection runtimeFieldStats, + Map sourceModeUsageCount ) { this.totalFieldCount = totalFieldCount; this.totalDeduplicatedFieldCount = totalDeduplicatedFieldCount; this.totalMappingSizeBytes = totalMappingSizeBytes; + this.sourceModeUsageCount = sourceModeUsageCount; List stats = new ArrayList<>(fieldTypeStats); stats.sort(Comparator.comparing(IndexFeatureStats::getName)); this.fieldTypeStats = Collections.unmodifiableList(stats); @@ -246,6 +262,10 @@ private static int countOccurrences(String script, Pattern pattern) { } fieldTypeStats = in.readCollectionAsImmutableList(FieldStats::new); runtimeFieldStats = in.readCollectionAsImmutableList(RuntimeFieldStats::new); + var transportVersion = in.getTransportVersion(); + sourceModeUsageCount = canReadOrWriteSourceModeTelemetry(transportVersion) + ? in.readImmutableMap(StreamInput::readString, StreamInput::readVInt) + : Map.of(); } @Override @@ -257,6 +277,15 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeCollection(fieldTypeStats); out.writeCollection(runtimeFieldStats); + var transportVersion = out.getTransportVersion(); + if (canReadOrWriteSourceModeTelemetry(transportVersion)) { + out.writeMap(sourceModeUsageCount, StreamOutput::writeVInt); + } + } + + private static boolean canReadOrWriteSourceModeTelemetry(TransportVersion version) { + return version.isPatchFrom(TransportVersions.SOURCE_MODE_TELEMETRY_FIX_8_17) + || version.onOrAfter(TransportVersions.SOURCE_MODE_TELEMETRY); } private static OptionalLong ofNullable(Long l) { @@ -300,6 +329,10 @@ public List getRuntimeFieldStats() { return runtimeFieldStats; } + public Map getSourceModeUsageCount() { + return sourceModeUsageCount; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("mappings"); @@ -326,6 +359,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws st.toXContent(builder, params); } builder.endArray(); + builder.startObject("source_modes"); + var entries = sourceModeUsageCount.entrySet().stream().sorted(Map.Entry.comparingByKey()).toList(); + for (var entry : entries) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); builder.endObject(); return builder; } @@ -344,11 +383,19 @@ public boolean equals(Object o) { && Objects.equals(totalDeduplicatedFieldCount, that.totalDeduplicatedFieldCount) && Objects.equals(totalMappingSizeBytes, that.totalMappingSizeBytes) && fieldTypeStats.equals(that.fieldTypeStats) - && runtimeFieldStats.equals(that.runtimeFieldStats); + && runtimeFieldStats.equals(that.runtimeFieldStats) + && sourceModeUsageCount.equals(that.sourceModeUsageCount); } @Override public int hashCode() { - return Objects.hash(totalFieldCount, totalDeduplicatedFieldCount, totalMappingSizeBytes, fieldTypeStats, runtimeFieldStats); + return Objects.hash( + totalFieldCount, + totalDeduplicatedFieldCount, + totalMappingSizeBytes, + fieldTypeStats, + runtimeFieldStats, + sourceModeUsageCount + ); } } diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 3955fc87bf392..12965152f260c 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ 
b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -23,3 +23,4 @@ org.elasticsearch.search.retriever.RetrieversFeatures org.elasticsearch.script.ScriptFeatures org.elasticsearch.reservedstate.service.FileSettingsFeatures org.elasticsearch.cluster.routing.RoutingFeatures +org.elasticsearch.action.admin.cluster.stats.ClusterStatsFeatures diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java index 2c374c7d26dee..96954458c18c4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/MappingStatsTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -29,7 +30,15 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.index.mapper.SourceFieldMapper.Mode.DISABLED; +import static org.elasticsearch.index.mapper.SourceFieldMapper.Mode.STORED; +import static org.elasticsearch.index.mapper.SourceFieldMapper.Mode.SYNTHETIC; +import static org.hamcrest.Matchers.equalTo; public class MappingStatsTests extends AbstractWireSerializingTestCase { @@ -203,7 +212,10 @@ public void testToXContent() { "doc_max" : 0, "doc_total" : 0 } - ] + ], + "source_modes" : { + "stored" : 2 + } } }""", Strings.toString(mappingStats, true, true)); } @@ -332,7 +344,10 @@ public void testToXContentWithSomeSharedMappings() { "doc_max" : 0, "doc_total" : 0 } - ] + ], + "source_modes" : { + "stored" : 3 + } } }""", Strings.toString(mappingStats, true, true)); } @@ -362,7 +377,24 @@ protected MappingStats createTestInstance() { if (randomBoolean()) { runtimeFieldStats.add(randomRuntimeFieldStats("long")); } - return new MappingStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), stats, runtimeFieldStats); + Map sourceModeUsageCount = randomBoolean() + ? 
Map.of() + : Map.of( + STORED.toString().toLowerCase(Locale.ENGLISH), + randomNonNegativeInt(), + SYNTHETIC.toString().toLowerCase(Locale.ENGLISH), + randomNonNegativeInt(), + DISABLED.toString().toLowerCase(Locale.ENGLISH), + randomNonNegativeInt() + ); + return new MappingStats( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + stats, + runtimeFieldStats, + sourceModeUsageCount + ); } private static FieldStats randomFieldStats(String type) { @@ -410,7 +442,8 @@ protected MappingStats mutateInstance(MappingStats instance) { long totalFieldCount = instance.getTotalFieldCount().getAsLong(); long totalDeduplicatedFieldCount = instance.getTotalDeduplicatedFieldCount().getAsLong(); long totalMappingSizeBytes = instance.getTotalMappingSizeBytes().getAsLong(); - switch (between(1, 5)) { + var sourceModeUsageCount = new HashMap<>(instance.getSourceModeUsageCount()); + switch (between(1, 6)) { case 1 -> { boolean remove = fieldTypes.size() > 0 && randomBoolean(); if (remove) { @@ -435,8 +468,22 @@ protected MappingStats mutateInstance(MappingStats instance) { case 3 -> totalFieldCount = randomValueOtherThan(totalFieldCount, ESTestCase::randomNonNegativeLong); case 4 -> totalDeduplicatedFieldCount = randomValueOtherThan(totalDeduplicatedFieldCount, ESTestCase::randomNonNegativeLong); case 5 -> totalMappingSizeBytes = randomValueOtherThan(totalMappingSizeBytes, ESTestCase::randomNonNegativeLong); + case 6 -> { + if (sourceModeUsageCount.isEmpty() == false) { + sourceModeUsageCount.remove(sourceModeUsageCount.keySet().stream().findFirst().get()); + } else { + sourceModeUsageCount.put("stored", randomNonNegativeInt()); + } + } } - return new MappingStats(totalFieldCount, totalDeduplicatedFieldCount, totalMappingSizeBytes, fieldTypes, runtimeFieldTypes); + return new MappingStats( + totalFieldCount, + totalDeduplicatedFieldCount, + totalMappingSizeBytes, + fieldTypes, + runtimeFieldTypes, + sourceModeUsageCount + ); } public void testDenseVectorType() { @@ -531,4 +578,39 @@ public void testWriteTo() throws IOException { assertEquals(instance.getFieldTypeStats(), deserialized.getFieldTypeStats()); assertEquals(instance.getRuntimeFieldStats(), deserialized.getRuntimeFieldStats()); } + + public void testSourceModes() { + var builder = Metadata.builder(); + int numStoredIndices = randomIntBetween(1, 5); + int numSyntheticIndices = randomIntBetween(1, 5); + int numDisabledIndices = randomIntBetween(1, 5); + for (int i = 0; i < numSyntheticIndices; i++) { + IndexMetadata.Builder indexMetadata = new IndexMetadata.Builder("foo-synthetic-" + i).settings( + indexSettings(IndexVersion.current(), 4, 1).put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "synthetic") + ); + builder.put(indexMetadata); + } + for (int i = 0; i < numStoredIndices; i++) { + IndexMetadata.Builder indexMetadata; + if (randomBoolean()) { + indexMetadata = new IndexMetadata.Builder("foo-stored-" + i).settings( + indexSettings(IndexVersion.current(), 4, 1).put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "stored") + ); + } else { + indexMetadata = new IndexMetadata.Builder("foo-stored-" + i).settings(indexSettings(IndexVersion.current(), 4, 1)); + } + builder.put(indexMetadata); + } + for (int i = 0; i < numDisabledIndices; i++) { + IndexMetadata.Builder indexMetadata = new IndexMetadata.Builder("foo-disabled-" + i).settings( + indexSettings(IndexVersion.current(), 4, 1).put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "disabled") + ); + 
builder.put(indexMetadata); + } + var mappingStats = MappingStats.of(builder.build(), () -> {}); + assertThat(mappingStats.getSourceModeUsageCount().get("synthetic"), equalTo(numSyntheticIndices)); + assertThat(mappingStats.getSourceModeUsageCount().get("stored"), equalTo(numStoredIndices)); + assertThat(mappingStats.getSourceModeUsageCount().get("disabled"), equalTo(numDisabledIndices)); + } + } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index 9458442557694..f4d50df4ff613 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -572,7 +572,8 @@ public void testToXContent() throws IOException { "total_deduplicated_field_count": 0, "total_deduplicated_mapping_size_in_bytes": 0, "field_types": [], - "runtime_field_types": [] + "runtime_field_types": [], + "source_modes": {} }, "analysis": { "char_filter_types": [], From 64dfed4e1f0610014f01fc7285fccac831a62c74 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 28 Nov 2024 11:01:52 +0100 Subject: [PATCH 301/386] ESQL: Mute CATEGORIZE optimizer tests on release builds (#117690) --- .../xpack/esql/optimizer/LogicalPlanOptimizerTests.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 2b4fb6ad68972..8373528531902 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -1211,6 +1212,8 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testCombineProjectionWithCategorizeGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + var plan = plan(""" from test | eval k = first_name, k1 = k @@ -3946,6 +3949,8 @@ public void testNestedExpressionsInGroups() { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testNestedExpressionsInGroupsWithCategorize() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + var plan = optimizedPlan(""" from test | stats c = count(salary) by CATEGORIZE(CONCAT(first_name, "abc")) From 146cb39143f93b6ce453229abf5be08335a75366 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Thu, 28 Nov 2024 13:46:24 +0100 Subject: [PATCH 302/386] ESQL - enabling scoring with METADATA _score (#113120) * ESQL - enabling scoring with METADATA _score Co-authored-by: ChrisHegarty --- docs/changelog/113120.yaml | 5 + muted-tests.yml | 6 + .../search/sort/SortBuilder.java | 15 +- .../core/expression/MetadataAttribute.java | 5 +- .../compute/lucene/LuceneOperator.java | 5 +- .../compute/lucene/LuceneSourceOperator.java | 96 ++++-- .../lucene/LuceneTopNSourceOperator.java | 141 +++++++-- .../elasticsearch/compute/OperatorTests.java | 3 +- .../LuceneQueryExpressionEvaluatorTests.java | 33 +- .../lucene/LuceneSourceOperatorTests.java | 31 +- .../LuceneTopNSourceOperatorScoringTests.java | 151 +++++++++ .../lucene/LuceneTopNSourceOperatorTests.java | 50 ++- .../ValueSourceReaderTypeConversionTests.java | 9 +- .../ValuesSourceReaderOperatorTests.java | 9 +- .../src/main/resources/qstr-function.csv-spec | 1 - .../src/main/resources/scoring.csv-spec | 285 +++++++++++++++++ .../xpack/esql/action/EsqlActionTaskIT.java | 7 +- .../xpack/esql/action/LookupFromIndexIT.java | 3 +- .../xpack/esql/plugin/MatchFunctionIT.java | 299 ++++++++++++++++++ .../xpack/esql/plugin/MatchOperatorIT.java | 51 +++ .../xpack/esql/plugin/QueryStringIT.java | 96 ++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/analysis/Verifier.java | 9 + .../local/LucenePushdownPredicates.java | 5 + .../physical/local/PushTopNToSource.java | 18 +- .../local/ReplaceSourceAttributes.java | 14 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 4 +- .../xpack/esql/plan/physical/EsQueryExec.java | 14 + .../planner/EsPhysicalOperationProviders.java | 14 +- .../xpack/esql/analysis/VerifierTests.java | 25 ++ .../optimizer/PhysicalPlanOptimizerTests.java | 62 ++++ .../physical/local/PushTopNToSourceTests.java | 193 ++++++++++- 32 files changed, 1570 insertions(+), 96 deletions(-) create mode 100644 docs/changelog/113120.yaml create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java diff --git a/docs/changelog/113120.yaml b/docs/changelog/113120.yaml new file mode 100644 index 0000000000000..801167d61c19c --- /dev/null +++ b/docs/changelog/113120.yaml @@ -0,0 +1,5 @@ +pr: 113120 +summary: ESQL - enabling scoring with METADATA `_score` +area: ES|QL +type: enhancement +issues: [] diff --git a/muted-tests.yml b/muted-tests.yml index 5cf16fdf3da0a..fdadc747289bb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -224,6 +224,12 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117591 - class: org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/117596 +- class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT" + method: "test {scoring.*}" + issue: https://github.com/elastic/elasticsearch/issues/117641 +- class: "org.elasticsearch.xpack.esql.qa.single_node.EsqlSpecIT" + method: "test 
{scoring.*}" + issue: https://github.com/elastic/elasticsearch/issues/117641 # Examples: # diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 0ac3b42dd5b10..5832b93b9462f 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -158,6 +158,11 @@ private static void parseCompoundSortField(XContentParser parser, List buildSort(List> sortBuilders, SearchExecutionContext context) throws IOException { + return buildSort(sortBuilders, context, true); + } + + public static Optional buildSort(List> sortBuilders, SearchExecutionContext context, boolean optimize) + throws IOException { List sortFields = new ArrayList<>(sortBuilders.size()); List sortFormats = new ArrayList<>(sortBuilders.size()); for (SortBuilder builder : sortBuilders) { @@ -172,9 +177,13 @@ public static Optional buildSort(List> sortBuilde if (sortFields.size() > 1) { sort = true; } else { - SortField sortField = sortFields.get(0); - if (sortField.getType() == SortField.Type.SCORE && sortField.getReverse() == false) { - sort = false; + if (optimize) { + SortField sortField = sortFields.get(0); + if (sortField.getType() == SortField.Type.SCORE && sortField.getReverse() == false) { + sort = false; + } else { + sort = true; + } } else { sort = true; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index 6e4e9292bfc99..0f1cfbb85039c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -31,6 +31,7 @@ public class MetadataAttribute extends TypedAttribute { public static final String TIMESTAMP_FIELD = "@timestamp"; public static final String TSID_FIELD = "_tsid"; + public static final String SCORE = "_score"; static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Attribute.class, @@ -50,7 +51,9 @@ public class MetadataAttribute extends TypedAttribute { SourceFieldMapper.NAME, tuple(DataType.SOURCE, false), IndexModeFieldMapper.NAME, - tuple(DataType.KEYWORD, true) + tuple(DataType.KEYWORD, true), + SCORE, + tuple(DataType.DOUBLE, false) ); private final boolean searchable; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 6f75298e95dd7..bbc3ace3716ba 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -79,6 +79,7 @@ public abstract static class Factory implements SourceOperator.SourceOperatorFac protected final DataPartitioning dataPartitioning; protected final int taskConcurrency; protected final int limit; + protected final ScoreMode scoreMode; protected final LuceneSliceQueue sliceQueue; /** @@ -95,6 +96,7 @@ protected Factory( ScoreMode scoreMode ) { this.limit = limit; + this.scoreMode = scoreMode; this.dataPartitioning = dataPartitioning; var weightFunction = weightFunction(queryFunction, scoreMode); this.sliceQueue = 
LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); @@ -438,7 +440,8 @@ static Function weightFunction(Function 0) { - --remainingDocs; - docsBuilder.appendInt(doc); - currentPagePos++; - } else { - throw new CollectionTerminatedException(); - } + class LimitingCollector implements LeafCollector { + @Override + public void setScorer(Scorable scorer) {} + + @Override + public void collect(int doc) throws IOException { + if (remainingDocs > 0) { + --remainingDocs; + docsBuilder.appendInt(doc); + currentPagePos++; + } else { + throw new CollectionTerminatedException(); } - }; + } + } + + final class ScoringCollector extends LuceneSourceOperator.LimitingCollector { + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) { + this.scorable = scorer; + } + + @Override + public void collect(int doc) throws IOException { + super.collect(doc); + scoreBuilder.appendDouble(scorable.score()); + } } @Override @@ -139,15 +179,27 @@ public Page getCheckedOutput() throws IOException { IntBlock shard = null; IntBlock leaf = null; IntVector docs = null; + DoubleVector scores = null; + DocBlock docBlock = null; try { shard = blockFactory.newConstantIntBlockWith(scorer.shardContext().index(), currentPagePos); leaf = blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos); docs = docsBuilder.build(); docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); - page = new Page(currentPagePos, new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock()); + docBlock = new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock(); + shard = null; + leaf = null; + docs = null; + if (scoreBuilder == null) { + page = new Page(currentPagePos, docBlock); + } else { + scores = scoreBuilder.build(); + scoreBuilder = blockFactory.newDoubleVectorBuilder(Math.min(remainingDocs, maxPageSize)); + page = new Page(currentPagePos, docBlock, scores.asBlock()); + } } finally { if (page == null) { - Releasables.closeExpectNoException(shard, leaf, docs); + Releasables.closeExpectNoException(shard, leaf, docs, docBlock, scores); } } currentPagePos = 0; @@ -160,7 +212,7 @@ public Page getCheckedOutput() throws IOException { @Override public void close() { - docsBuilder.close(); + Releasables.close(docsBuilder, scoreBuilder); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 0f600958b93b3..8da62963ffb64 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -10,15 +10,22 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; 
import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; @@ -29,17 +36,21 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Optional; import java.util.function.Function; import java.util.stream.Collectors; +import static org.apache.lucene.search.ScoreMode.COMPLETE; +import static org.apache.lucene.search.ScoreMode.TOP_DOCS; + /** * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { - public static final class Factory extends LuceneOperator.Factory { + public static class Factory extends LuceneOperator.Factory { private final int maxPageSize; private final List> sorts; @@ -50,16 +61,17 @@ public Factory( int taskConcurrency, int maxPageSize, int limit, - List> sorts + List> sorts, + boolean scoring ) { - super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, ScoreMode.TOP_DOCS); + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, scoring ? COMPLETE : TOP_DOCS); this.maxPageSize = maxPageSize; this.sorts = sorts; } @Override public SourceOperator get(DriverContext driverContext) { - return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue); + return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue, scoreMode); } public int maxPageSize() { @@ -75,6 +87,8 @@ public String describe() { + maxPageSize + ", limit = " + limit + + ", scoreMode = " + + scoreMode + ", sorts = [" + notPrettySorts + "]]"; @@ -93,17 +107,20 @@ public String describe() { private PerShardCollector perShardCollector; private final List> sorts; private final int limit; + private final ScoreMode scoreMode; public LuceneTopNSourceOperator( BlockFactory blockFactory, int maxPageSize, List> sorts, int limit, - LuceneSliceQueue sliceQueue + LuceneSliceQueue sliceQueue, + ScoreMode scoreMode ) { super(blockFactory, maxPageSize, sliceQueue); this.sorts = sorts; this.limit = limit; + this.scoreMode = scoreMode; } @Override @@ -145,7 +162,7 @@ private Page collect() throws IOException { try { if (perShardCollector == null || perShardCollector.shardContext.index() != scorer.shardContext().index()) { // TODO: share the bottom between shardCollectors - perShardCollector = new PerShardCollector(scorer.shardContext(), sorts, limit); + perShardCollector = newPerShardCollector(scorer.shardContext(), sorts, limit); } var leafCollector = perShardCollector.getLeafCollector(scorer.leafReaderContext()); scorer.scoreNextRange(leafCollector, scorer.leafReaderContext().reader().getLiveDocs(), maxPageSize); @@ -171,7 +188,7 @@ private Page emit(boolean startEmitting) { assert isEmitting() == false : "offset=" + offset + " score_docs=" + Arrays.toString(scoreDocs); offset = 0; if (perShardCollector != null) { - scoreDocs = perShardCollector.topFieldCollector.topDocs().scoreDocs; + scoreDocs = perShardCollector.collector.topDocs().scoreDocs; } else { scoreDocs = new ScoreDoc[0]; } @@ -183,10 +200,13 @@ private Page 
emit(boolean startEmitting) { IntBlock shard = null; IntVector segments = null; IntVector docs = null; + DocBlock docBlock = null; + DoubleBlock scores = null; Page page = null; try ( IntVector.Builder currentSegmentBuilder = blockFactory.newIntVectorFixedBuilder(size); - IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size) + IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size); + DoubleVector.Builder currentScoresBuilder = scoreVectorOrNull(size); ) { int start = offset; offset += size; @@ -196,53 +216,130 @@ private Page emit(boolean startEmitting) { int segment = ReaderUtil.subIndex(doc, leafContexts); currentSegmentBuilder.appendInt(segment); currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment + if (currentScoresBuilder != null) { + float score = getScore(scoreDocs[i]); + currentScoresBuilder.appendDouble(score); + } } shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardContext.index(), size); segments = currentSegmentBuilder.build(); docs = currentDocsBuilder.build(); - page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock()); + docBlock = new DocVector(shard.asVector(), segments, docs, null).asBlock(); + shard = null; + segments = null; + docs = null; + if (currentScoresBuilder == null) { + page = new Page(size, docBlock); + } else { + scores = currentScoresBuilder.build().asBlock(); + page = new Page(size, docBlock, scores); + } } finally { if (page == null) { - Releasables.closeExpectNoException(shard, segments, docs); + Releasables.closeExpectNoException(shard, segments, docs, docBlock, scores); } } pagesEmitted++; return page; } + private float getScore(ScoreDoc scoreDoc) { + if (scoreDoc instanceof FieldDoc fieldDoc) { + if (Float.isNaN(fieldDoc.score)) { + if (sorts != null) { + return (Float) fieldDoc.fields[sorts.size() + 1]; + } else { + return (Float) fieldDoc.fields[0]; + } + } else { + return fieldDoc.score; + } + } else { + return scoreDoc.score; + } + } + + private DoubleVector.Builder scoreVectorOrNull(int size) { + if (scoreMode.needsScores()) { + return blockFactory.newDoubleVectorFixedBuilder(size); + } else { + return null; + } + } + @Override protected void describe(StringBuilder sb) { sb.append(", limit = ").append(limit); + sb.append(", scoreMode = ").append(scoreMode); String notPrettySorts = sorts.stream().map(Strings::toString).collect(Collectors.joining(",")); sb.append(", sorts = [").append(notPrettySorts).append("]"); } - static final class PerShardCollector { + PerShardCollector newPerShardCollector(ShardContext shardContext, List> sorts, int limit) throws IOException { + Optional sortAndFormats = shardContext.buildSort(sorts); + if (sortAndFormats.isEmpty()) { + throw new IllegalStateException("sorts must not be disabled in TopN"); + } + if (scoreMode.needsScores() == false) { + return new NonScoringPerShardCollector(shardContext, sortAndFormats.get().sort, limit); + } else { + SortField[] sortFields = sortAndFormats.get().sort.getSort(); + if (sortFields != null && sortFields.length == 1 && sortFields[0].needsScores() && sortFields[0].getReverse() == false) { + // SORT _score DESC + return new ScoringPerShardCollector( + shardContext, + new TopScoreDocCollectorManager(limit, null, limit, false).newCollector() + ); + } else { + // SORT ..., _score, ... 
+                var sort = new Sort();
+                if (sortFields != null) {
+                    var l = new ArrayList<>(Arrays.asList(sortFields));
+                    l.add(SortField.FIELD_DOC);
+                    l.add(SortField.FIELD_SCORE);
+                    sort = new Sort(l.toArray(SortField[]::new));
+                }
+                return new ScoringPerShardCollector(
+                    shardContext,
+                    new TopFieldCollectorManager(sort, limit, null, limit, false).newCollector()
+                );
+            }
+        }
+    }
+
+    abstract static class PerShardCollector {
         private final ShardContext shardContext;
-        private final TopFieldCollector topFieldCollector;
+        private final TopDocsCollector collector;
         private int leafIndex;
         private LeafCollector leafCollector;
         private Thread currentThread;
 
-        PerShardCollector(ShardContext shardContext, List> sorts, int limit) throws IOException {
+        PerShardCollector(ShardContext shardContext, TopDocsCollector collector) {
             this.shardContext = shardContext;
-            Optional sortAndFormats = shardContext.buildSort(sorts);
-            if (sortAndFormats.isEmpty()) {
-                throw new IllegalStateException("sorts must not be disabled in TopN");
-            }
-
-            // We don't use CollectorManager here as we don't retrieve the total hits and sort by score.
-            this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector();
+            this.collector = collector;
         }
 
         LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException {
             if (currentThread != Thread.currentThread() || leafIndex != leafReaderContext.ord) {
-                leafCollector = topFieldCollector.getLeafCollector(leafReaderContext);
+                leafCollector = collector.getLeafCollector(leafReaderContext);
                 leafIndex = leafReaderContext.ord;
                 currentThread = Thread.currentThread();
             }
             return leafCollector;
         }
     }
+
+    static final class NonScoringPerShardCollector extends PerShardCollector {
+        NonScoringPerShardCollector(ShardContext shardContext, Sort sort, int limit) {
+            // We don't use CollectorManager here as we don't retrieve the total hits and sort by score.
+ super(shardContext, new TopFieldCollectorManager(sort, limit, null, 0, false).newCollector()); + } + } + + static final class ScoringPerShardCollector extends PerShardCollector { + ScoringPerShardCollector(ShardContext shardContext, TopDocsCollector topDocsCollector) { + super(shardContext, topDocsCollector); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 0d39a5bf8227e..e6ef10e53ec7c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -394,7 +394,8 @@ static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query qu randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), randomPageSize(), - limit + limit, + false // no scoring ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index beca522878358..ffaee536b443e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.DenseCollector; @@ -120,8 +122,9 @@ public void testTermQueryShuffled() throws IOException { private void assertTermQuery(String term, List results) { int matchCount = 0; for (Page page : results) { - BytesRefVector terms = page.getBlock(1).asVector(); - BooleanVector matches = page.getBlock(2).asVector(); + int initialBlockIndex = initialBlockIndex(page); + BytesRefVector terms = page.getBlock(initialBlockIndex).asVector(); + BooleanVector matches = page.getBlock(initialBlockIndex + 1).asVector(); for (int i = 0; i < page.getPositionCount(); i++) { BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef()); assertThat(matches.getBoolean(i), equalTo(termAtPosition.utf8ToString().equals(term))); @@ -155,8 +158,9 @@ private void testTermsQuery(boolean shuffleDocs) throws IOException { List results = runQuery(values, new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_REWRITE, FIELD, matchingBytes), shuffleDocs); int matchCount = 0; for (Page page : results) { - BytesRefVector terms = page.getBlock(1).asVector(); - BooleanVector matches = page.getBlock(2).asVector(); + int initialBlockIndex = initialBlockIndex(page); + BytesRefVector terms = page.getBlock(initialBlockIndex).asVector(); + BooleanVector matches = page.getBlock(initialBlockIndex + 1).asVector(); for (int i = 0; i < page.getPositionCount(); i++) { BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef()); assertThat(matches.getBoolean(i), equalTo(matching.contains(termAtPosition.utf8ToString()))); @@ -207,7 +211,7 @@ private List runQuery(Set values, Query query, boolean shuffleDocs List results = new ArrayList<>(); Driver 
driver = new Driver( driverContext, - luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), + luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext), operators, new TestResultPageSinkOperator(results::add), () -> {} @@ -248,7 +252,21 @@ private DriverContext driverContext() { return new DriverContext(blockFactory.bigArrays(), blockFactory); } - static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) { + // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage. + private final boolean scoring = randomBoolean(); + + // Returns the initial block index, ignoring the score block if scoring is enabled + private int initialBlockIndex(Page page) { + assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0"; + if (scoring) { + assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1"; + return 2; + } else { + return 1; + } + } + + static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit, boolean scoring) { final ShardContext searchContext = new LuceneSourceOperatorTests.MockShardContext(reader, 0); return new LuceneSourceOperator.Factory( List.of(searchContext), @@ -256,7 +274,8 @@ static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query qu randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), randomPageSize(), - limit + limit, + scoring ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 626190c04c501..2dcc5e20d3f98 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -17,6 +17,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -63,10 +65,10 @@ public void closeIndex() throws IOException { @Override protected LuceneSourceOperator.Factory simple() { - return simple(randomFrom(DataPartitioning.values()), between(1, 10_000), 100); + return simple(randomFrom(DataPartitioning.values()), between(1, 10_000), 100, scoring); } - private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, int numDocs, int limit) { + private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, int numDocs, int limit, boolean scoring) { int commitEvery = Math.max(1, numDocs / 10); try ( RandomIndexWriter writer = new RandomIndexWriter( @@ -91,7 +93,7 @@ private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, i ShardContext ctx = new MockShardContext(reader, 0); Function queryFunction = c -> new MatchAllDocsQuery(); int maxPageSize = between(10, Math.max(10, numDocs)); - return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit); + return new LuceneSourceOperator.Factory(List.of(ctx), 
queryFunction, dataPartitioning, 1, maxPageSize, limit, scoring); } @Override @@ -101,7 +103,10 @@ protected Matcher expectedToStringOfSimple() { @Override protected Matcher expectedDescriptionOfSimple() { - return matchesRegex("LuceneSourceOperator\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100]"); + return matchesRegex( + "LuceneSourceOperator" + + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = (COMPLETE|COMPLETE_NO_SCORES)]" + ); } // TODO tests for the other data partitioning configurations @@ -149,7 +154,7 @@ public void testShardDataPartitioningWithCranky() { } private void testSimple(DriverContext ctx, int size, int limit) { - LuceneSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit); + LuceneSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit, scoring); Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG); List results = new ArrayList<>(); @@ -164,7 +169,7 @@ private void testSimple(DriverContext ctx, int size, int limit) { } for (Page page : results) { - LongBlock sBlock = page.getBlock(1); + LongBlock sBlock = page.getBlock(initialBlockIndex(page)); for (int p = 0; p < page.getPositionCount(); p++) { assertThat(sBlock.getLong(sBlock.getFirstValueIndex(p)), both(greaterThanOrEqualTo(0L)).and(lessThan((long) size))); } @@ -174,6 +179,20 @@ private void testSimple(DriverContext ctx, int size, int limit) { assertThat(results, hasSize(both(greaterThanOrEqualTo(minPages)).and(lessThanOrEqualTo(maxPages)))); } + // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage. + private final boolean scoring = randomBoolean(); + + // Returns the initial block index, ignoring the score block if scoring is enabled + private int initialBlockIndex(Page page) { + assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0"; + if (scoring) { + assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1"; + return 2; + } else { + return 1; + } + } + /** * Creates a mock search context with the given index reader. * The returned mock search context can be used to test with {@link LuceneOperator}. diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java new file mode 100644 index 0000000000000..a0fa1c2c01c0a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSelector; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortAndFormats; +import org.elasticsearch.search.sort.SortBuilder; +import org.hamcrest.Matcher; +import org.junit.After; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.matchesRegex; + +public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperatorTests { + private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); + private Directory directory = newDirectory(); + private IndexReader reader; + + @After + private void closeIndex() throws IOException { + IOUtils.close(reader, directory); + } + + @Override + protected LuceneTopNSourceOperator.Factory simple() { + return simple(DataPartitioning.SHARD, 10_000, 100); + } + + private LuceneTopNSourceOperator.Factory simple(DataPartitioning dataPartitioning, int size, int limit) { + int commitEvery = Math.max(1, size / 10); + try ( + RandomIndexWriter writer = new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE) + ) + ) { + for (int d = 0; d < size; d++) { + List doc = new ArrayList<>(); + doc.add(new SortedNumericDocValuesField("s", d)); + writer.addDocument(doc); + if (d % commitEvery == 0) { + writer.commit(); + } + } + reader = writer.getReader(); + } catch (IOException e) { + throw new RuntimeException(e); + } + + ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0) { + @Override + public Optional buildSort(List> sorts) { + SortField field = new SortedNumericSortField("s", SortField.Type.LONG, false, SortedNumericSelector.Type.MIN); + return Optional.of(new SortAndFormats(new Sort(field), new DocValueFormat[] { null })); + } + }; + Function queryFunction = c -> new MatchAllDocsQuery(); + int taskConcurrency = 0; + int maxPageSize = between(10, Math.max(10, size)); + List> sorts = List.of(new FieldSortBuilder("s")); + return new LuceneTopNSourceOperator.Factory( + List.of(ctx), + 
queryFunction, + dataPartitioning, + taskConcurrency, + maxPageSize, + limit, + sorts, + true // scoring + ); + } + + @Override + protected Matcher expectedToStringOfSimple() { + return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]"); + } + + @Override + protected Matcher expectedDescriptionOfSimple() { + return matchesRegex( + "LuceneTopNSourceOperator" + + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]" + ); + } + + @Override + protected void testSimple(DriverContext ctx, int size, int limit) { + LuceneTopNSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit); + Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG); + + List results = new ArrayList<>(); + OperatorTestCase.runDriver( + new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) + ); + OperatorTestCase.assertDriverContext(ctx); + + long expectedS = 0; + int maxPageSize = factory.maxPageSize(); + for (Page page : results) { + if (limit - expectedS < maxPageSize) { + assertThat(page.getPositionCount(), equalTo((int) (limit - expectedS))); + } else { + assertThat(page.getPositionCount(), equalTo(maxPageSize)); + } + DoubleBlock sBlock = page.getBlock(1); + for (int p = 0; p < page.getPositionCount(); p++) { + assertThat(sBlock.getDouble(sBlock.getFirstValueIndex(p)), equalTo(1.0d)); + expectedS++; + } + } + int pages = (int) Math.ceil((float) Math.min(size, limit) / maxPageSize); + assertThat(results, hasSize(pages)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 938c4ce5c9f7d..d9a0b70b7931e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -20,6 +20,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -56,7 +58,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { private IndexReader reader; @After - public void closeIndex() throws IOException { + private void closeIndex() throws IOException { IOUtils.close(reader, directory); } @@ -105,19 +107,25 @@ public Optional buildSort(List> sorts) { taskConcurrency, maxPageSize, limit, - sorts + sorts, + scoring ); } @Override protected Matcher expectedToStringOfSimple() { - return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, sorts = \\[\\{.+}]]"); + var s = scoring ? "COMPLETE" : "TOP_DOCS"; + return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = " + s + ", sorts = \\[\\{.+}]]"); } @Override protected Matcher expectedDescriptionOfSimple() { + var s = scoring ? 
"COMPLETE" : "TOP_DOCS"; return matchesRegex( - "LuceneTopNSourceOperator\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, sorts = \\[\\{.+}]]" + "LuceneTopNSourceOperator" + + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = " + + s + + ", sorts = \\[\\{.+}]]" ); } @@ -137,12 +145,24 @@ public void testShardDataPartitioningWithCranky() { } } - private void testShardDataPartitioning(DriverContext context) { + void testShardDataPartitioning(DriverContext context) { int size = between(1_000, 20_000); int limit = between(10, size); testSimple(context, size, limit); } + public void testWithCranky() { + try { + int size = between(1_000, 20_000); + int limit = between(10, size); + testSimple(crankyDriverContext(), size, limit); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + public void testEmpty() { testEmpty(driverContext()); } @@ -157,11 +177,11 @@ public void testEmptyWithCranky() { } } - private void testEmpty(DriverContext context) { + void testEmpty(DriverContext context) { testSimple(context, 0, between(10, 10_000)); } - private void testSimple(DriverContext ctx, int size, int limit) { + protected void testSimple(DriverContext ctx, int size, int limit) { LuceneTopNSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit); Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG); @@ -178,7 +198,7 @@ private void testSimple(DriverContext ctx, int size, int limit) { } else { assertThat(page.getPositionCount(), equalTo(factory.maxPageSize())); } - LongBlock sBlock = page.getBlock(1); + LongBlock sBlock = page.getBlock(initialBlockIndex(page)); for (int p = 0; p < page.getPositionCount(); p++) { assertThat(sBlock.getLong(sBlock.getFirstValueIndex(p)), equalTo(expectedS++)); } @@ -186,4 +206,18 @@ private void testSimple(DriverContext ctx, int size, int limit) { int pages = (int) Math.ceil((float) Math.min(size, limit) / factory.maxPageSize()); assertThat(results, hasSize(pages)); } + + // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage. 
+ private final boolean scoring = randomBoolean(); + + // Returns the initial block index, ignoring the score block if scoring is enabled + private int initialBlockIndex(Page page) { + assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0"; + if (scoring) { + assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1"; + return 2; + } else { + return 1; + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index f6d81af7c14e5..f31573f121a71 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -265,7 +265,8 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv DataPartitioning.SHARD, 1,// randomIntBetween(1, 10), pageSize, - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); return luceneFactory.get(context); } @@ -1292,7 +1293,8 @@ public void testWithNulls() throws IOException { randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), randomPageSize(), - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE); try ( @@ -1450,7 +1452,8 @@ public void testManyShards() throws IOException { DataPartitioning.SHARD, randomIntBetween(1, 10), 1000, - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); // TODO add index2 MappedFieldType ft = mapperService(indexKey).fieldType("key"); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index c8dd6f87be5fc..95b313b0b5412 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -170,7 +170,8 @@ private SourceOperator simpleInput(DriverContext context, int size, int commitEv DataPartitioning.SHARD, randomIntBetween(1, 10), pageSize, - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); return luceneFactory.get(context); } @@ -1301,7 +1302,8 @@ public void testWithNulls() throws IOException { randomFrom(DataPartitioning.values()), randomIntBetween(1, 10), randomPageSize(), - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); try ( Driver driver = new Driver( @@ -1524,7 +1526,8 @@ public void testManyShards() throws IOException { DataPartitioning.SHARD, randomIntBetween(1, 10), 1000, - LuceneOperator.NO_LIMIT + LuceneOperator.NO_LIMIT, + false // no scoring ); MappedFieldType ft = mapperService.fieldType("key"); var readerFactory = new ValuesSourceReaderOperator.Factory( diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec index 6039dc05b6c44..2c84bdae6b32e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/qstr-function.csv-spec @@ -100,7 +100,6 @@ book_no:keyword | title:text 7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) ; - qstrWithMultivaluedTextField required_capability: qstr_function diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec new file mode 100644 index 0000000000000..d4c7b8c59fdbc --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec @@ -0,0 +1,285 @@ +############################################### +# Tests for scoring support +# + +singleQstrBoostScoringSorted +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:Lord Rings^2") +| eval c_score = ceil(_score) +| keep book_no, title, c_score +| sort c_score desc, book_no asc +| LIMIT 2; + +book_no:keyword | title:text | c_score:double +2675 | The Lord of the Rings - Boxed Set | 6.0 +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | 6.0 +; + +singleMatchWithKeywordFieldScoring +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where author.keyword:"William Faulkner" +| keep book_no, author, _score +| sort book_no; + +book_no:keyword | author:text | _score:double +2713 | William Faulkner | 2.3142893314361572 +2883 | William Faulkner | 2.3142893314361572 +4724 | William Faulkner | 2.3142893314361572 +4977 | William Faulkner | 2.3142893314361572 +5119 | William Faulkner | 2.3142893314361572 +5404 | William Faulkner | 2.3142893314361572 +5578 | William Faulkner | 2.3142893314361572 +8077 | William Faulkner | 2.3142893314361572 +9896 | William Faulkner | 2.3142893314361572 +; + +qstrWithFieldAndScoringSortedEval +required_capability: qstr_function +required_capability: metadata_score + +from books metadata _score +| where qstr("title:rings") +| sort _score desc +| eval _score::long +| keep book_no, title, _score +| limit 3; + +book_no:keyword | title:text | _score:double +2675 | The Lord of the Rings - Boxed Set | 2.7583377361297607 +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) | 1.9239964485168457 +2714 | Return of the King Being the Third Part of The Lord of the Rings | 1.9239964485168457 +; + +qstrWithFieldAndScoringSorted +required_capability: qstr_function +required_capability: metadata_score + +from books metadata _score +| where qstr("title:rings") +| sort _score desc, book_no desc +| keep book_no, title, _score +| limit 3; + +book_no:keyword | title:text | _score:double +2675 | The Lord of the Rings - Boxed Set | 2.7583377361297607 +7140 | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 
1) | 1.9239964485168457 +2714 | Return of the King Being the Third Part of The Lord of the Rings | 1.9239964485168457 +; + +singleQstrScoringManipulated +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:William Faulkner") +| eval add_score = ceil(_score) + 1 +| keep book_no, author, add_score +| sort book_no +| LIMIT 2; + +book_no:keyword | author:text | add_score:double +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 2.0 +2713 | William Faulkner | 7.0 +; + +testMultiValuedFieldWithConjunctionWithScore +required_capability: match_function +required_capability: metadata_score + +from employees metadata _score +| where match(job_positions, "Data Scientist") and match(job_positions, "Support Engineer") +| keep emp_no, first_name, last_name, job_positions, _score; + +emp_no:integer | first_name:keyword | last_name:keyword | job_positions:keyword | _score:double +10043 | Yishay | Tzvieli | [Data Scientist, Python Developer, Support Engineer] | 5.233309745788574 +; + +testMatchAndQueryStringFunctionsWithScore +required_capability: match_function +required_capability: metadata_score + +from employees metadata _score +| where match(job_positions, "Data Scientist") and qstr("job_positions: (Support Engineer) and gender: F") +| keep emp_no, first_name, last_name, job_positions, _score; +ignoreOrder:true + +emp_no:integer | first_name:keyword | last_name:keyword | job_positions:keyword | _score:double +10041 | Uri | Lenart | [Data Scientist, Head Human Resources, Internship, Senior Team Lead] | 3.509873867034912 +10043 | Yishay | Tzvieli | [Data Scientist, Python Developer, Support Engineer] | 5.233309745788574 +; + +multipleWhereWithMatchScoringNoSort +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"short stories" +| where author:"Ursula K. Le Guin" +| keep book_no, title, author, _score; + +ignoreOrder:true +book_no:keyword | title:text | author:text | _score:double +8480 | The wind's twelve quarters: Short stories | Ursula K. Le Guin | 14.489097595214844 +; + +multipleWhereWithMatchScoring +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"short stories" +| where author:"Ursula K. Le Guin" +| keep book_no, title, author, _score +| sort book_no; + +book_no:keyword | title:text | author:text | _score:double +8480 | The wind's twelve quarters: Short stories | Ursula K. Le Guin | 14.489097595214844 +; + +combinedMatchWithFunctionsScoring +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"Tolkien" AND author:"Tolkien" AND year > 2000 +| where mv_count(author) == 1 +| keep book_no, title, author, year, _score +| sort book_no; + +book_no:keyword | title:text | author:text | year:integer | _score:double +5335 | Letters of J R R Tolkien | J.R.R. 
Tolkien | 2014 | 5.448054313659668 +; + +singleQstrScoring +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:William Faulkner") +| keep book_no, author, _score +| sort book_no +| LIMIT 2; + +book_no:keyword | author:text | _score:double +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 0.9976131916046143 +2713 | William Faulkner | 5.9556169509887695 +; + +singleQstrScoringGrok +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:Lord Rings") +| GROK title "%{WORD:title} %{WORD}" +| sort _score desc +| keep book_no, title, _score +| LIMIT 3; + +book_no:keyword | title:keyword | _score:double +8875 | The | 2.9505908489227295 +4023 | A | 2.8327860832214355 +2675 | The | 2.7583377361297607 +; + +combinedMatchWithScoringEvalNoSort +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"Tolkien" AND author:"Tolkien" AND year > 2000 +| where mv_count(author) == 1 +| eval c_score = ceil(_score) +| keep book_no, title, author, year, c_score; + +ignoreOrder:true +book_no:keyword | title:text | author:text | year:integer | c_score:double +5335 | Letters of J R R Tolkien | J.R.R. Tolkien | 2014 | 6 +; + +singleQstrScoringRename +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:Lord Rings") +| rename _score as rank +| sort rank desc +| keep book_no, rank +| LIMIT 3; + +book_no:keyword | rank:double +8875 | 2.9505908489227295 +4023 | 2.8327860832214355 +2675 | 2.7583377361297607 +; + +singleMatchWithTextFieldScoring +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where author:"William Faulkner" +| sort book_no +| keep book_no, author, _score +| limit 5; + +book_no:keyword | author:text | _score:double +2378 | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 0.9976131916046143 +2713 | William Faulkner | 4.272439002990723 +2847 | Colleen Faulkner | 1.7401835918426514 +2883 | William Faulkner | 4.272439002990723 +3293 | Danny Faulkner | 1.7401835918426514 +; + +combinedMatchWithFunctionsScoringNoSort +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"Tolkien" AND author:"Tolkien" AND year > 2000 +| where mv_count(author) == 1 +| keep book_no, title, author, year, _score; + +ignoreOrder:true +book_no:keyword | title:text | author:text | year:integer | _score:double +5335 | Letters of J R R Tolkien | J.R.R. Tolkien | 2014 | 5.448054313659668 +; + +combinedMatchWithScoringEval +required_capability: metadata_score +required_capability: match_operator_colon + +from books metadata _score +| where title:"Tolkien" AND author:"Tolkien" AND year > 2000 +| where mv_count(author) == 1 +| eval c_score = ceil(_score) +| keep book_no, title, author, year, c_score +| sort book_no; + +book_no:keyword | title:text | author:text | year:integer | c_score:double +5335 | Letters of J R R Tolkien | J.R.R. 
Tolkien | 2014 | 6 +; + +singleQstrScoringEval +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("author:Lord Rings") +| eval c_score = ceil(_score) +| keep book_no, c_score +| sort book_no desc +| LIMIT 3; + +book_no:keyword | c_score:double +8875 | 3.0 +7350 | 2.0 +7140 | 3.0 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 56453a291ea81..1939f81353c0e 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -89,7 +89,7 @@ public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); nodeLevelReduction = randomBoolean(); READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] \\_ValuesSourceReaderOperator[fields = [pause_me]] \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); @@ -448,6 +448,7 @@ protected void doRun() throws Exception { public void testTaskContentsForTopNQuery() throws Exception { READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " + + "scoreMode = TOP_DOCS, " + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" + "\\_ProjectOperator[projection = [1]]\n" @@ -482,7 +483,7 @@ public void testTaskContentsForTopNQuery() throws Exception { public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit()] + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES] \\_ValuesSourceReaderOperator[fields = [pause_me]] \\_ProjectOperator[projection = [1]] \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); @@ -511,7 +512,7 @@ public void testTaskContentsForLimitQuery() throws Exception { public void testTaskContentsForGroupingStatsQuery() throws Exception { READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES] \\_ValuesSourceReaderOperator[fields = [foo]] \\_OrdinalsGroupingOperator(aggs = max of longs) \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index 5c0c13b48df3b..3b9359fe66d40 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -148,7 +148,8 @@ public void testLookupIndex() throws IOException { DataPartitioning.SEGMENT, 1, 10000, - DocIdSetIterator.NO_MORE_DOCS + DocIdSetIterator.NO_MORE_DOCS, + false // no scoring ); ValuesSourceReaderOperator.Factory reader = new ValuesSourceReaderOperator.Factory( List.of( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java new file mode 100644 index 0000000000000..99f7d48a0d636 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java @@ -0,0 +1,299 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; +import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.junit.Before; + +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.containsString; + +//@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") +public class MatchFunctionIT extends AbstractEsqlIntegTestCase { + + @Before + public void setupIndex() { + createAndPopulateIndex(); + } + + @Override + protected EsqlQueryResponse run(EsqlQueryRequest request) { + assumeTrue("match function capability not available", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled()); + return super.run(request); + } + + public void testSimpleWhereMatch() { + var query = """ + FROM test + | WHERE match(content, "fox") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(6))); + } + } + + public void testCombinedWhereMatch() { + var query = """ + FROM test + | WHERE match(content, "fox") AND id > 5 + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(6))); + } + } + + public void testMultipleMatch() { + var query = """ + FROM test + | WHERE match(content, "fox") AND match(content, "brown") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(1), List.of(6))); + } + } + + public void testMultipleWhereMatch() { + var query = """ + FROM test + | WHERE match(content, "fox") AND match(content, "brown") + | EVAL summary = 
CONCAT("document with id: ", to_str(id), "and content: ", content) + | SORT summary + | LIMIT 4 + | WHERE match(content, "brown fox") + | KEEP id + """; + + var error = expectThrows(ElasticsearchException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("[MATCH] function cannot be used after LIMIT")); + } + + public void testNotWhereMatch() { + var query = """ + FROM test + | WHERE NOT match(content, "brown fox") + | KEEP id + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id")); + assertColumnTypes(resp.columns(), List.of("integer")); + assertValues(resp.values(), List.of(List.of(5))); + } + } + + public void testWhereMatchWithScoring() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE match(content, "fox") + | KEEP id, _score + | SORT id ASC + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + + public void testWhereMatchWithScoringDifferentSort() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE match(content, "fox") + | KEEP id, _score + | SORT id DESC + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues(resp.values(), List.of(List.of(6, 0.9114001989364624), List.of(1, 1.156558871269226))); + } + } + + public void testWhereMatchWithScoringSortScore() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE match(content, "fox") + | KEEP id, _score + | SORT _score DESC + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + + public void testWhereMatchWithScoringNoSort() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE content:"fox" + | KEEP id, _score + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValuesInAnyOrder(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + + public void testNonExistingColumn() { + var query = """ + FROM test + | WHERE something:"fox" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("Unknown column [something]")); + } + + public void testWhereMatchEvalColumn() { + var query = """ + FROM test + | EVAL upper_content = to_upper(content) + | WHERE upper_content:"FOX" + | KEEP id + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [upper_content], which is not a field from an index mapping") + ); + } + + public void 
testWhereMatchOverWrittenColumn() { + var query = """ + FROM test + | DROP content + | EVAL content = CONCAT("document with ID ", to_str(id)) + | WHERE content:"document" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [content], which is not a field from an index mapping") + ); + } + + public void testWhereMatchAfterStats() { + var query = """ + FROM test + | STATS count(*) + | WHERE content:"fox" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("Unknown column [content]")); + } + + public void testWhereMatchWithFunctions() { + var query = """ + FROM test + | WHERE content:"fox" OR to_upper(content) == "FOX" + """; + var error = expectThrows(ElasticsearchException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString( + "Invalid condition [content:\"fox\" OR to_upper(content) == \"FOX\"]. " + + "[:] operator can't be used as part of an or condition" + ) + ); + } + + public void testWhereMatchWithRow() { + var query = """ + ROW content = "a brown fox" + | WHERE content:"fox" + """; + + var error = expectThrows(ElasticsearchException.class, () -> run(query)); + assertThat( + error.getMessage(), + containsString("[:] operator cannot operate on [\"a brown fox\"], which is not a field from an index mapping") + ); + } + + public void testMatchWithinEval() { + var query = """ + FROM test + | EVAL matches_query = content:"fox" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("[:] operator is only supported in WHERE commands")); + } + + public void testMatchWithNonTextField() { + var query = """ + FROM test + | WHERE id:"fox" + """; + + var error = expectThrows(VerificationException.class, () -> run(query)); + assertThat(error.getMessage(), containsString("first argument of [id:\"fox\"] must be [string], found value [id] type [integer]")); + } + + private void createAndPopulateIndex() { + var indexName = "test"; + var client = client().admin().indices(); + var CreateRequest = client.prepareCreate(indexName) + .setSettings(Settings.builder().put("index.number_of_shards", 1)) + .setMapping("id", "type=integer", "content", "type=text"); + assertAcked(CreateRequest); + client().prepareBulk() + .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox")) + .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog")) + .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown")) + .add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long")) + .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat")) + .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog")) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + ensureYellow(indexName); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index 3b647583f1129..6a360eb319abb 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.Before; import java.util.List; @@ -105,6 +106,56 @@ public void testNotWhereMatch() { } } + public void testWhereMatchWithScoring() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE content:"fox" + | KEEP id, _score + | SORT id ASC + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + + public void testWhereMatchWithScoringDifferentSort() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE content:"fox" + | KEEP id, _score + | SORT id + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + + public void testWhereMatchWithScoringNoSort() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE content:"fox" + | KEEP id, _score + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValuesInAnyOrder(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624))); + } + } + public void testNonExistingColumn() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java index 03af16d29e9b4..a3d1ac931528c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.junit.Before; import java.util.List; @@ -137,4 +138,99 @@ private void createAndPopulateIndex() { .get(); ensureYellow(indexName); } + + public void testWhereQstrWithScoring() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE qstr("content: fox") + | KEEP id, _score + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValuesInAnyOrder( + resp.values(), + List.of( + List.of(2, 0.3028995096683502), + List.of(3, 
0.3028995096683502), + List.of(4, 0.2547692656517029), + List.of(5, 0.28161853551864624) + ) + ); + + } + } + + public void testWhereQstrWithScoringSorted() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE qstr("content:fox fox") + | KEEP id, _score + | SORT _score DESC + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValues( + resp.values(), + List.of( + List.of(3, 1.5605685710906982), + List.of(2, 0.6057990193367004), + List.of(5, 0.5632370710372925), + List.of(4, 0.5095385313034058) + ) + ); + + } + } + + public void testWhereQstrWithScoringNoSort() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE qstr("content: fox") + | KEEP id, _score + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValuesInAnyOrder( + resp.values(), + List.of( + List.of(2, 0.3028995096683502), + List.of(3, 0.3028995096683502), + List.of(4, 0.2547692656517029), + List.of(5, 0.28161853551864624) + ) + ); + } + } + + public void testWhereQstrWithNonPushableAndScoring() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var query = """ + FROM test + METADATA _score + | WHERE qstr("content: fox") + AND abs(id) > 0 + | EVAL c_score = ceil(_score) + | KEEP id, c_score + | SORT id DESC + | LIMIT 2 + """; + + try (var resp = run(query)) { + assertColumnNames(resp.columns(), List.of("id", "c_score")); + assertColumnTypes(resp.columns(), List.of("integer", "double")); + assertValuesInAnyOrder(resp.values(), List.of(List.of(5, 1.0), List.of(4, 1.0))); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d8004f73f613f..9bd4211855699 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -539,7 +539,12 @@ public enum Cap { /** * Fix for https://github.com/elastic/elasticsearch/issues/114714, again */ - FIX_STATS_BY_FOLDABLE_EXPRESSION_2,; + FIX_STATS_BY_FOLDABLE_EXPRESSION_2, + + /** + * Support the "METADATA _score" directive to enable _score column. 
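+     * For example (mirroring the scoring csv-spec tests added in this change):
+     * <pre>
+     * from books metadata _score
+     * | where qstr("title:rings")
+     * | sort _score desc, book_no desc
+     * | keep book_no, title, _score
+     * </pre>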
+ */ + METADATA_SCORE(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 2be13398dab2f..5f8c011cff53a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -221,6 +222,7 @@ else if (p instanceof Lookup lookup) { checkFullTextQueryFunctions(p, failures); }); checkRemoteEnrich(plan, failures); + checkMetadataScoreNameReserved(plan, failures); if (failures.isEmpty()) { checkLicense(plan, licenseState, failures); @@ -234,6 +236,13 @@ else if (p instanceof Lookup lookup) { return failures; } + private static void checkMetadataScoreNameReserved(LogicalPlan p, Set failures) { + // _score can only be set as metadata attribute + if (p.inputSet().stream().anyMatch(a -> MetadataAttribute.SCORE.equals(a.name()) && (a instanceof MetadataAttribute) == false)) { + failures.add(fail(p, "`" + MetadataAttribute.SCORE + "` is a reserved METADATA attribute")); + } + } + private void checkSort(LogicalPlan p, Set failures) { if (p instanceof OrderBy ob) { ob.order().forEach(o -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java index feb8717f007b7..8046d6bc56607 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java @@ -9,6 +9,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -59,6 +60,10 @@ default boolean isPushableFieldAttribute(Expression exp) { return false; } + default boolean isPushableMetadataAttribute(Expression exp) { + return exp instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE); + } + /** * The default implementation of this has no access to SearchStats, so it can only make decisions based on the FieldAttribute itself. 
* In particular, it assumes TEXT fields have no exact subfields (underlying keyword field), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 925e144b69fcc..2b531257e594a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.expression.Order; @@ -57,6 +58,7 @@ * */ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimizerRule { + @Override protected PhysicalPlan rule(TopNExec topNExec, LocalPhysicalOptimizerContext ctx) { Pushable pushable = evaluatePushable(topNExec, LucenePushdownPredicates.from(ctx.searchStats())); @@ -155,6 +157,8 @@ && canPushDownOrders(topNExec.order(), lucenePushdownPredicates)) { order.nullsPosition() ) ); + } else if (lucenePushdownPredicates.isPushableMetadataAttribute(order.child())) { + pushableSorts.add(new EsQueryExec.ScoreSort(order.direction())); } else if (order.child() instanceof ReferenceAttribute referenceAttribute) { Attribute resolvedAttribute = aliasReplacedBy.resolve(referenceAttribute, referenceAttribute); if (distances.containsKey(resolvedAttribute.id())) { @@ -192,13 +196,23 @@ && canPushDownOrders(topNExec.order(), lucenePushdownPredicates)) { private static boolean canPushDownOrders(List orders, LucenePushdownPredicates lucenePushdownPredicates) { // allow only exact FieldAttributes (no expressions) for sorting - return orders.stream().allMatch(o -> lucenePushdownPredicates.isPushableFieldAttribute(o.child())); + return orders.stream() + .allMatch( + o -> lucenePushdownPredicates.isPushableFieldAttribute(o.child()) + || lucenePushdownPredicates.isPushableMetadataAttribute(o.child()) + ); } private static List buildFieldSorts(List orders) { List sorts = new ArrayList<>(orders.size()); for (Order o : orders) { - sorts.add(new EsQueryExec.FieldSort(((FieldAttribute) o.child()).exactAttribute(), o.direction(), o.nullsPosition())); + if (o.child() instanceof FieldAttribute fa) { + sorts.add(new EsQueryExec.FieldSort(fa.exactAttribute(), o.direction(), o.nullsPosition())); + } else if (o.child() instanceof MetadataAttribute ma && MetadataAttribute.SCORE.equals(ma.name())) { + sorts.add(new EsQueryExec.ScoreSort(o.direction())); + } else { + assert false : "unexpected ordering on expression type " + o.child().getClass(); + } } return sorts; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java index 74ea6f99e5e59..11e386ddd046c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import java.util.ArrayList; import java.util.List; import static org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection.UP; @@ -29,6 +30,8 @@ public ReplaceSourceAttributes() { @Override protected PhysicalPlan rule(EsSourceExec plan) { var docId = new FieldAttribute(plan.source(), EsQueryExec.DOC_ID_FIELD.getName(), EsQueryExec.DOC_ID_FIELD); + final List attributes = new ArrayList<>(); + attributes.add(docId); if (plan.indexMode() == IndexMode.TIME_SERIES) { Attribute tsid = null, timestamp = null; for (Attribute attr : plan.output()) { @@ -42,9 +45,14 @@ protected PhysicalPlan rule(EsSourceExec plan) { if (tsid == null || timestamp == null) { throw new IllegalStateException("_tsid or @timestamp are missing from the time-series source"); } - return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), List.of(docId, tsid, timestamp), plan.query()); - } else { - return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), List.of(docId), plan.query()); + attributes.add(tsid); + attributes.add(timestamp); } + plan.output().forEach(attr -> { + if (attr instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)) { + attributes.add(ma); + } + }); + return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), attributes, plan.query()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 99e03b3653f79..24398afa18010 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -16,6 +16,7 @@ import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -276,7 +277,8 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { for (var c : metadataOptionContext.UNQUOTED_SOURCE()) { String id = c.getText(); Source src = source(c); - if (MetadataAttribute.isSupported(id) == false) { + if (MetadataAttribute.isSupported(id) == false // TODO: drop check below once METADATA_SCORE is no longer snapshot-only + || (EsqlCapabilities.Cap.METADATA_SCORE.isEnabled() == false && MetadataAttribute.SCORE.equals(id))) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); } Attribute a = metadataMap.put(id, MetadataAttribute.create(src, id)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java index 82848fb2f1062..267b9e613abef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java @@ -15,6 +15,7 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.GeoDistanceSortBuilder; +import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -94,6 +95,19 @@ public SortBuilder sortBuilder() { } } + public record ScoreSort(Order.OrderDirection direction) implements Sort { + @Override + public SortBuilder sortBuilder() { + return new ScoreSortBuilder(); + } + + @Override + public FieldAttribute field() { + // TODO: refactor this: not all Sorts are backed by FieldAttributes + return null; + } + } + public EsQueryExec(Source source, EsIndex index, IndexMode indexMode, List attributes, QueryBuilder query) { this(source, index, indexMode, attributes, query, null, null, null); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index ab0d68b152262..15f5b6579098d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; @@ -165,7 +166,10 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, assert esQueryExec.estimatedRowSize() != null : "estimated row size not initialized"; int rowEstimatedSize = esQueryExec.estimatedRowSize(); int limit = esQueryExec.limit() != null ? 
(Integer) esQueryExec.limit().fold() : NO_LIMIT; - if (sorts != null && sorts.isEmpty() == false) { + boolean scoring = esQueryExec.attrs() + .stream() + .anyMatch(a -> a instanceof MetadataAttribute && a.name().equals(MetadataAttribute.SCORE)); + if ((sorts != null && sorts.isEmpty() == false)) { List> sortBuilders = new ArrayList<>(sorts.size()); for (Sort sort : sorts) { sortBuilders.add(sort.sortBuilder()); @@ -177,7 +181,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), limit, - sortBuilders + sortBuilders, + scoring ); } else { if (esQueryExec.indexMode() == IndexMode.TIME_SERIES) { @@ -195,7 +200,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, context.queryPragmas().dataPartitioning(), context.queryPragmas().taskConcurrency(), context.pageSize(rowEstimatedSize), - limit + limit, + scoring ); } } @@ -273,7 +279,7 @@ public IndexSearcher searcher() { @Override public Optional buildSort(List> sorts) throws IOException { - return SortBuilder.buildSort(sorts, ctx); + return SortBuilder.buildSort(sorts, ctx, false); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 355073fcc873f..6074601535477 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; +import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.parser.QueryParams; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -1754,6 +1756,29 @@ public void testToDatePeriodToTimeDurationWithInvalidType() { ); } + public void testNonMetadataScore() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + assertEquals("1:12: `_score` is a reserved METADATA attribute", error("from foo | eval _score = 10")); + + assertEquals( + "1:48: `_score` is a reserved METADATA attribute", + error("from foo metadata _score | where qstr(\"bar\") | eval _score = _score + 1") + ); + } + + public void testScoreRenaming() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + assertEquals("1:33: `_score` is a reserved METADATA attribute", error("from foo METADATA _id, _score | rename _id as _score")); + + assertTrue(passes("from foo metadata _score | rename _score as foo").stream().anyMatch(a -> a.name().equals("foo"))); + } + + private List passes(String query) { + LogicalPlan logicalPlan = defaultAnalyzer.analyze(parser.createStatement(query)); + assertTrue(logicalPlan.resolved()); + return logicalPlan.output(); + } + public void testIntervalAsString() { // DateTrunc for (String interval : List.of("1 minu", "1 dy", "1.5 minutes", "0.5 
days", "minutes 1", "day 5")) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index f3ba11457a715..1f131f79c3d0e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats.Config; import org.elasticsearch.xpack.esql.VerificationException; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -63,6 +64,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; @@ -6581,6 +6583,66 @@ public void testLookupThenTopN() { ); } + public void testScore() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var plan = physicalPlan(""" + from test metadata _score + | where match(first_name, "john") + | keep _score + """); + + ProjectExec outerProject = as(plan, ProjectExec.class); + LimitExec limitExec = as(outerProject.child(), LimitExec.class); + ExchangeExec exchange = as(limitExec.child(), ExchangeExec.class); + FragmentExec frag = as(exchange.child(), FragmentExec.class); + + LogicalPlan opt = logicalOptimizer.optimize(frag.fragment()); + Limit limit = as(opt, Limit.class); + Filter filter = as(limit.child(), Filter.class); + + Match match = as(filter.condition(), Match.class); + assertTrue(match.field() instanceof FieldAttribute); + assertEquals("first_name", ((FieldAttribute) match.field()).field().getName()); + + EsRelation esRelation = as(filter.child(), EsRelation.class); + assertTrue(esRelation.optimized()); + assertTrue(esRelation.resolved()); + assertTrue(esRelation.output().stream().anyMatch(a -> a.name().equals(MetadataAttribute.SCORE) && a instanceof MetadataAttribute)); + } + + public void testScoreTopN() { + assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); + var plan = physicalPlan(""" + from test metadata _score + | where match(first_name, "john") + | keep _score + | sort _score desc + """); + + ProjectExec projectExec = as(plan, ProjectExec.class); + TopNExec topNExec = as(projectExec.child(), TopNExec.class); + ExchangeExec exchange = as(topNExec.child(), ExchangeExec.class); + FragmentExec frag = as(exchange.child(), FragmentExec.class); + + LogicalPlan opt = logicalOptimizer.optimize(frag.fragment()); + TopN topN = as(opt, TopN.class); + List order = topN.order(); + Order scoreOrer = order.getFirst(); + assertEquals(Order.OrderDirection.DESC, scoreOrer.direction()); + Expression 
child = scoreOrder.child(); + assertTrue(child instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)); + Filter filter = as(topN.child(), Filter.class); + + Match match = as(filter.condition(), Match.class); + assertTrue(match.field() instanceof FieldAttribute); + assertEquals("first_name", ((FieldAttribute) match.field()).field().getName()); + + EsRelation esRelation = as(filter.child(), EsRelation.class); + assertTrue(esRelation.optimized()); + assertTrue(esRelation.resolved()); + assertTrue(esRelation.output().stream().anyMatch(a -> a.name().equals(MetadataAttribute.SCORE) && a instanceof MetadataAttribute)); + } + @SuppressWarnings("SameParameterValue") private static void assertFilterCondition( Filter filter, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java index 98f0af8e4b8e6..2429bcb1a1b04 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -64,6 +65,13 @@ public void testSimpleSortField() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleScoreSortField() { + // FROM index METADATA _score | SORT _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false).scoreSort().limit(10); + assertPushdownSort(query); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortMultipleFields() { // FROM index | SORT field, integer, double | LIMIT 10 var query = from("index").sort("field").sort("integer").sort("double").limit(10); @@ -71,6 +79,13 @@ public void testSimpleSortMultipleFields() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortMultipleFieldsAndScore() { + // FROM index | SORT field, integer, double, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false).sort("field").sort("integer").sort("double").scoreSort().limit(10); + assertPushdownSort(query); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortFieldAndEvalLiteral() { // FROM index | EVAL x = 1 | SORT field | LIMIT 10 var query = from("index").eval("x", e -> e.i(1)).sort("field").limit(10); @@ -78,6 +93,13 @@ public void testSimpleSortFieldAndEvalLiteral() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortFieldScoreAndEvalLiteral() { + // FROM index METADATA _score | EVAL x = 1 | SORT field, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false).eval("x", e -> e.i(1)).sort("field").scoreSort().limit(10); + assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class)); + 
assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortFieldWithAlias() { // FROM index | EVAL x = field | SORT field | LIMIT 10 var query = from("index").eval("x", b -> b.field("field")).sort("field").limit(10); @@ -98,6 +120,21 @@ public void testSimpleSortMultipleFieldsWithAliases() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortMultipleFieldsWithAliasesAndScore() { + // FROM index | EVAL x = field, y = integer, z = double | SORT field, integer, double, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("x", b -> b.field("field")) + .eval("y", b -> b.field("integer")) + .eval("z", b -> b.field("double")) + .sort("field") + .sort("integer") + .sort("double") + .scoreSort() + .limit(10); + assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortFieldAsAlias() { // FROM index | EVAL x = field | SORT x | LIMIT 10 var query = from("index").eval("x", b -> b.field("field")).sort("x").limit(10); @@ -105,6 +142,13 @@ public void testSimpleSortFieldAsAlias() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortFieldAsAliasAndScore() { + // FROM index METADATA _score | EVAL x = field | SORT x, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false).eval("x", b -> b.field("field")).sort("x").scoreSort().limit(10); + assertPushdownSort(query, Map.of("x", "field"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortFieldAndEvalSumLiterals() { // FROM index | EVAL sum = 1 + 2 | SORT field | LIMIT 10 var query = from("index").eval("sum", b -> b.add(b.i(1), b.i(2))).sort("field").limit(10); @@ -112,6 +156,17 @@ public void testSimpleSortFieldAndEvalSumLiterals() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortFieldAndEvalSumLiteralsAndScore() { + // FROM index METADATA _score | EVAL sum = 1 + 2 | SORT field, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("sum", b -> b.add(b.i(1), b.i(2))) + .sort("field") + .scoreSort() + .limit(10); + assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortFieldAndEvalSumLiteralAndField() { // FROM index | EVAL sum = 1 + integer | SORT integer | LIMIT 10 var query = from("index").eval("sum", b -> b.add(b.i(1), b.field("integer"))).sort("integer").limit(10); @@ -119,6 +174,17 @@ public void testSimpleSortFieldAndEvalSumLiteralAndField() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSimpleSortFieldAndEvalSumLiteralAndFieldAndScore() { + // FROM index METADATA _score | EVAL sum = 1 + integer | SORT integer, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("sum", b -> b.add(b.i(1), b.field("integer"))) + .sort("integer") + .scoreSort() + .limit(10); + assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSimpleSortEvalSumLiteralAndField() { // FROM index | EVAL sum = 1 + integer | SORT sum | LIMIT 10 var query = 
from("index").eval("sum", b -> b.add(b.i(1), b.field("integer"))).sort("sum").limit(10); @@ -144,6 +210,14 @@ public void testSortGeoPointField() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoPointFieldAnsScore() { + // FROM index METADATA _score | SORT location, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false).sort("location", Order.OrderDirection.ASC).scoreSort().limit(10); + // NOTE: while geo_point is not sortable, this is checked during logical planning and the physical planner does not know or care + assertPushdownSort(query); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunction() { // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)) | SORT distance | LIMIT 10 var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)")) @@ -154,6 +228,18 @@ public void testSortGeoDistanceFunction() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionAndScore() { + // FROM index METADATA _score | EVAL distance = ST_DISTANCE(location, POINT(1 2)) | SORT distance, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("distance", b -> b.distance("location", "POINT(1 2)")) + .sort("distance", Order.OrderDirection.ASC) + .scoreSort() + .limit(10); + // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance' + assertPushdownSort(query, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunctionInverted() { // FROM index | EVAL distance = ST_DISTANCE(POINT(1 2), location) | SORT distance | LIMIT 10 var query = from("index").eval("distance", b -> b.distance("POINT(1 2)", "location")) @@ -164,6 +250,18 @@ public void testSortGeoDistanceFunctionInverted() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionInvertedAndScore() { + // FROM index METADATA _score | EVAL distance = ST_DISTANCE(POINT(1 2), location) | SORT distance, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("distance", b -> b.distance("POINT(1 2)", "location")) + .sort("distance", Order.OrderDirection.ASC) + .scoreSort() + .limit(10); + // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance' + assertPushdownSort(query, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunctionLiterals() { // FROM index | EVAL distance = ST_DISTANCE(POINT(2 1), POINT(1 2)) | SORT distance | LIMIT 10 var query = from("index").eval("distance", b -> b.distance("POINT(2 1)", "POINT(1 2)")) @@ -174,6 +272,18 @@ public void testSortGeoDistanceFunctionLiterals() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionLiteralsAndScore() { + // FROM index METADATA _score | EVAL distance = ST_DISTANCE(POINT(2 1), POINT(1 2)) | SORT distance, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("distance", b -> b.distance("POINT(2 1)", "POINT(1 2)")) + .sort("distance", Order.OrderDirection.ASC) + 
.scoreSort() + .limit(10); + // The distance here is computed from two literals, so it folds to a constant and no sort can be pushed down + assertNoPushdownSort(query, "sort on foldable distance function"); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunctionAndFieldsWithAliases() { // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, field, integer | LIMIT 10 var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)")) @@ -187,6 +297,21 @@ public void testSortGeoDistanceFunctionAndFieldsWithAliases() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionAndFieldsWithAliasesAndScore() { + // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, field, integer, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("distance", b -> b.distance("location", "POINT(1 2)")) + .eval("x", b -> b.field("field")) + .sort("distance", Order.OrderDirection.ASC) + .sort("field", Order.OrderDirection.DESC) + .sort("integer", Order.OrderDirection.DESC) + .scoreSort() + .limit(10); + // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance' + assertPushdownSort(query, query.orders, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunctionAndFieldsAndAliases() { // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, x, integer | LIMIT 10 var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)")) @@ -200,6 +325,21 @@ public void testSortGeoDistanceFunctionAndFieldsAndAliases() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionAndFieldsAndAliasesAndScore() { + // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, x, integer, _score | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("distance", b -> b.distance("location", "POINT(1 2)")) + .eval("x", b -> b.field("field")) + .sort("distance", Order.OrderDirection.ASC) + .sort("x", Order.OrderDirection.DESC) + .sort("integer", Order.OrderDirection.DESC) + .scoreSort() + .limit(10); + // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance' + assertPushdownSort(query, query.orders, Map.of("distance", "location", "x", "field"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + public void testSortGeoDistanceFunctionAndFieldsAndManyAliases() { // FROM index // | EVAL loc = location, loc2 = loc, loc3 = loc2, distance = ST_DISTANCE(loc3, POINT(1 2)), x = field // | SORT distance, x, integer // | LIMIT 10 var query = from("index").eval("loc", b -> b.field("location")) @@ -219,6 +359,27 @@ public void testSortGeoDistanceFunctionAndFieldsAndManyAliases() { assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); } + public void testSortGeoDistanceFunctionAndFieldsAndManyAliasesAndScore() { + // FROM index METADATA _score + // | EVAL loc = location, loc2 = loc, loc3 = loc2, distance = ST_DISTANCE(loc3, POINT(1 2)), x = field + // | SORT distance, x, integer, _score + // | LIMIT 10 + var query = from("index").metadata("_score", DOUBLE, false) + .eval("loc", b -> b.field("location")) + 
.eval("loc2", b -> b.ref("loc")) + .eval("loc3", b -> b.ref("loc2")) + .eval("distance", b -> b.distance("loc3", "POINT(1 2)")) + .eval("x", b -> b.field("field")) + .sort("distance", Order.OrderDirection.ASC) + .sort("x", Order.OrderDirection.DESC) + .sort("integer", Order.OrderDirection.DESC) + .scoreSort() + .limit(10); + // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance' + assertPushdownSort(query, Map.of("distance", "location", "x", "field"), List.of(EvalExec.class, EsQueryExec.class)); + assertNoPushdownSort(query.asTimeSeries(), "for time series index mode"); + } + private static void assertPushdownSort(TestPhysicalPlanBuilder builder) { assertPushdownSort(builder, null, List.of(EsQueryExec.class)); } @@ -289,9 +450,12 @@ private static void assertPushdownSort( assertThat("Expect sorts count to match", sorts.size(), is(expectedSorts.size())); for (int i = 0; i < expectedSorts.size(); i++) { String name = ((Attribute) expectedSorts.get(i).child()).name(); - String fieldName = sorts.get(i).field().fieldName(); - assertThat("Expect sort[" + i + "] name to match", fieldName, is(sortName(name, fieldMap))); - assertThat("Expect sort[" + i + "] direction to match", sorts.get(i).direction(), is(expectedSorts.get(i).direction())); + EsQueryExec.Sort sort = sorts.get(i); + if (sort.field() != null) { + String fieldName = sort.field().fieldName(); + assertThat("Expect sort[" + i + "] name to match", fieldName, is(sortName(name, fieldMap))); + } + assertThat("Expect sort[" + i + "] direction to match", sort.direction(), is(expectedSorts.get(i).direction())); } } @@ -317,6 +481,7 @@ static class TestPhysicalPlanBuilder { private final String index; private final LinkedHashMap fields; private final LinkedHashMap refs; + private final LinkedHashMap metadata; private IndexMode indexMode; private final List aliases = new ArrayList<>(); private final List orders = new ArrayList<>(); @@ -327,6 +492,7 @@ private TestPhysicalPlanBuilder(String index, IndexMode indexMode) { this.indexMode = indexMode; this.fields = new LinkedHashMap<>(); this.refs = new LinkedHashMap<>(); + this.metadata = new LinkedHashMap<>(); addSortableFieldAttributes(this.fields); } @@ -346,6 +512,11 @@ static TestPhysicalPlanBuilder from(String index) { return new TestPhysicalPlanBuilder(index, IndexMode.STANDARD); } + TestPhysicalPlanBuilder metadata(String metadataAttribute, DataType dataType, boolean searchable) { + metadata.put(metadataAttribute, new MetadataAttribute(Source.EMPTY, metadataAttribute, dataType, searchable)); + return this; + } + public TestPhysicalPlanBuilder eval(Alias... 
aliases) { if (orders.isEmpty() == false) { throw new IllegalArgumentException("Eval must be before sort"); } @@ -376,6 +547,22 @@ public TestPhysicalPlanBuilder sort(String field) { return sort(field, Order.OrderDirection.ASC); } + public TestPhysicalPlanBuilder scoreSort(Order.OrderDirection direction) { + orders.add( + new Order( + Source.EMPTY, + MetadataAttribute.create(Source.EMPTY, MetadataAttribute.SCORE), + direction, + Order.NullsPosition.LAST + ) + ); + return this; + } + + public TestPhysicalPlanBuilder scoreSort() { + return scoreSort(Order.OrderDirection.DESC); + } + public TestPhysicalPlanBuilder sort(String field, Order.OrderDirection direction) { Attribute attr = refs.get(field); if (attr == null) { From 6b94a91633fc846fe02ac8cf3173d613af27bc01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Thu, 28 Nov 2024 16:07:07 +0100 Subject: [PATCH 303/386] ESQL: Add nulls support to Categorize (#117655) Handle nulls and empty strings (which resolve to null) in the Categorize grouping function. Also, implement `seenGroupIds()`, which would fail some queries with nulls otherwise. --- docs/changelog/117655.yaml | 5 + .../AbstractCategorizeBlockHash.java | 37 +++++- .../blockhash/CategorizeRawBlockHash.java | 12 +- .../CategorizedIntermediateBlockHash.java | 19 ++- .../blockhash/CategorizeBlockHashTests.java | 72 +++++++---- .../src/main/resources/categorize.csv-spec | 122 ++++++++++-------- .../xpack/esql/action/EsqlCapabilities.java | 5 +- .../xpack/esql/analysis/VerifierTests.java | 6 +- .../optimizer/LogicalPlanOptimizerTests.java | 4 +- .../categorization/TokenListCategorizer.java | 2 + 10 files changed, 186 insertions(+), 98 deletions(-) create mode 100644 docs/changelog/117655.yaml diff --git a/docs/changelog/117655.yaml b/docs/changelog/117655.yaml new file mode 100644 index 0000000000000..f2afd3570f104 --- /dev/null +++ b/docs/changelog/117655.yaml @@ -0,0 +1,5 @@ +pr: 117655 +summary: Add nulls support to Categorize +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java index 22d3a10facb06..0e89d77820883 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java @@ -13,8 +13,10 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -31,11 +33,21 @@ * Base BlockHash implementation for {@code Categorize} grouping function. 
*/ public abstract class AbstractCategorizeBlockHash extends BlockHash { + protected static final int NULL_ORD = 0; + // TODO: this should probably also take an emitBatchSize private final int channel; private final boolean outputPartial; protected final TokenListCategorizer.CloseableTokenListCategorizer categorizer; + /** + * Store whether we've seen any {@code null} values. + *
<p>
    + * Null gets the {@link #NULL_ORD} ord.
+ * </p>
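+ * <p>
+ *     Category ids coming out of the categorizer are shifted by one accordingly: category {@code n} is exposed as
+ *     group ord {@code n + 1}, which is why the raw and intermediate hashes below remap ids with {@code + 1}.
+ * </p>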
    + */ + protected boolean seenNull = false; + AbstractCategorizeBlockHash(BlockFactory blockFactory, int channel, boolean outputPartial) { super(blockFactory); this.channel = channel; @@ -58,12 +70,12 @@ public Block[] getKeys() { @Override public IntVector nonEmpty() { - return IntVector.range(0, categorizer.getCategoryCount(), blockFactory); + return IntVector.range(seenNull ? 0 : 1, categorizer.getCategoryCount() + 1, blockFactory); } @Override public BitArray seenGroupIds(BigArrays bigArrays) { - throw new UnsupportedOperationException(); + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(categorizer.getCategoryCount() + 1)).seenGroupIds(bigArrays); } @Override @@ -76,24 +88,39 @@ public final ReleasableIterator lookup(Page page, ByteSizeValue target */ private Block buildIntermediateBlock() { if (categorizer.getCategoryCount() == 0) { - return blockFactory.newConstantNullBlock(0); + return blockFactory.newConstantNullBlock(seenNull ? 1 : 0); } try (BytesStreamOutput out = new BytesStreamOutput()) { // TODO be more careful here. + out.writeBoolean(seenNull); out.writeVInt(categorizer.getCategoryCount()); for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { category.writeTo(out); } // We're returning a block with N positions just because the Page must have all blocks with the same position count! - return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), categorizer.getCategoryCount()); + int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); + return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), positionCount); } catch (IOException e) { throw new RuntimeException(e); } } private Block buildFinalBlock() { + BytesRefBuilder scratch = new BytesRefBuilder(); + + if (seenNull) { + try (BytesRefBlock.Builder result = blockFactory.newBytesRefBlockBuilder(categorizer.getCategoryCount())) { + result.appendNull(); + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + scratch.copyChars(category.getRegex()); + result.appendBytesRef(scratch.get()); + scratch.clear(); + } + return result.build(); + } + } + try (BytesRefVector.Builder result = blockFactory.newBytesRefVectorBuilder(categorizer.getCategoryCount())) { - BytesRefBuilder scratch = new BytesRefBuilder(); for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { scratch.copyChars(category.getRegex()); result.appendBytesRef(scratch.get()); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java index bf633e0454384..0d0a2fef2f82b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java @@ -64,7 +64,7 @@ public void close() { /** * Similar implementation to an Evaluator. 
*/ - public static final class CategorizeEvaluator implements Releasable { + public final class CategorizeEvaluator implements Releasable { private final CategorizationAnalyzer analyzer; private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; @@ -95,7 +95,8 @@ public IntBlock eval(int positionCount, BytesRefBlock vBlock) { BytesRef vScratch = new BytesRef(); for (int p = 0; p < positionCount; p++) { if (vBlock.isNull(p)) { - result.appendNull(); + seenNull = true; + result.appendInt(NULL_ORD); continue; } int first = vBlock.getFirstValueIndex(p); @@ -126,7 +127,12 @@ public IntVector eval(int positionCount, BytesRefVector vVector) { } private int process(BytesRef v) { - return categorizer.computeCategory(v.utf8ToString(), analyzer).getId(); + var category = categorizer.computeCategory(v.utf8ToString(), analyzer); + if (category == null) { + seenNull = true; + return NULL_ORD; + } + return category.getId() + 1; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java index 1bca34a70e5fa..c774d3b26049d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java @@ -40,9 +40,19 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { return; } BytesRefBlock categorizerState = page.getBlock(channel()); + if (categorizerState.areAllValuesNull()) { + seenNull = true; + try (var newIds = blockFactory.newConstantIntVector(NULL_ORD, 1)) { + addInput.add(0, newIds); + } + return; + } + Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { - for (int i = 0; i < idMap.size(); i++) { + int fromId = idMap.containsKey(0) ? 
0 : 1; + int toId = fromId + idMap.size(); + for (int i = fromId; i < toId; i++) { newIdsBuilder.appendInt(idMap.get(i)); } try (IntBlock newIds = newIdsBuilder.build()) { @@ -59,10 +69,15 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private Map readIntermediate(BytesRef bytes) { Map idMap = new HashMap<>(); try (StreamInput in = new BytesArray(bytes).streamInput()) { + if (in.readBoolean()) { + seenNull = true; + idMap.put(NULL_ORD, NULL_ORD); + } int count = in.readVInt(); for (int oldCategoryId = 0; oldCategoryId < count; oldCategoryId++) { int newCategoryId = categorizer.mergeWireCategory(new SerializableTokenListCategory(in)).getId(); - idMap.put(oldCategoryId, newCategoryId); + // +1 because the 0 ordinal is reserved for null + idMap.put(oldCategoryId + 1, newCategoryId + 1); } return idMap; } catch (IOException e) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index de8a2a44266fe..dd7a87dc4a574 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -52,7 +52,8 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { public void testCategorizeRaw() { final Page page; - final int positions = 7; + boolean withNull = randomBoolean(); + final int positions = 7 + (withNull ? 1 : 0); try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions)) { builder.appendBytesRef(new BytesRef("Connected to 10.1.0.1")); builder.appendBytesRef(new BytesRef("Connection error")); @@ -61,6 +62,13 @@ public void testCategorizeRaw() { builder.appendBytesRef(new BytesRef("Disconnected")); builder.appendBytesRef(new BytesRef("Connected to 10.1.0.2")); builder.appendBytesRef(new BytesRef("Connected to 10.1.0.3")); + if (withNull) { + if (randomBoolean()) { + builder.appendNull(); + } else { + builder.appendBytesRef(new BytesRef("")); + } + } page = new Page(builder.build()); } @@ -70,13 +78,16 @@ public void testCategorizeRaw() { public void add(int positionOffset, IntBlock groupIds) { assertEquals(groupIds.getPositionCount(), positions); - assertEquals(0, groupIds.getInt(0)); - assertEquals(1, groupIds.getInt(1)); - assertEquals(1, groupIds.getInt(2)); - assertEquals(1, groupIds.getInt(3)); - assertEquals(2, groupIds.getInt(4)); - assertEquals(0, groupIds.getInt(5)); - assertEquals(0, groupIds.getInt(6)); + assertEquals(1, groupIds.getInt(0)); + assertEquals(2, groupIds.getInt(1)); + assertEquals(2, groupIds.getInt(2)); + assertEquals(2, groupIds.getInt(3)); + assertEquals(3, groupIds.getInt(4)); + assertEquals(1, groupIds.getInt(5)); + assertEquals(1, groupIds.getInt(6)); + if (withNull) { + assertEquals(0, groupIds.getInt(7)); + } } @Override @@ -100,7 +111,8 @@ public void close() { public void testCategorizeIntermediate() { Page page1; - int positions1 = 7; + boolean withNull = randomBoolean(); + int positions1 = 7 + (withNull ? 
1 : 0); try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions1)) { builder.appendBytesRef(new BytesRef("Connected to 10.1.0.1")); builder.appendBytesRef(new BytesRef("Connection error")); @@ -109,6 +121,13 @@ public void testCategorizeIntermediate() { builder.appendBytesRef(new BytesRef("Connection error")); builder.appendBytesRef(new BytesRef("Connected to 10.1.0.3")); builder.appendBytesRef(new BytesRef("Connected to 10.1.0.4")); + if (withNull) { + if (randomBoolean()) { + builder.appendNull(); + } else { + builder.appendBytesRef(new BytesRef("")); + } + } page1 = new Page(builder.build()); } Page page2; @@ -133,13 +152,16 @@ public void testCategorizeIntermediate() { @Override public void add(int positionOffset, IntBlock groupIds) { assertEquals(groupIds.getPositionCount(), positions1); - assertEquals(0, groupIds.getInt(0)); - assertEquals(1, groupIds.getInt(1)); - assertEquals(1, groupIds.getInt(2)); - assertEquals(0, groupIds.getInt(3)); - assertEquals(1, groupIds.getInt(4)); - assertEquals(0, groupIds.getInt(5)); - assertEquals(0, groupIds.getInt(6)); + assertEquals(1, groupIds.getInt(0)); + assertEquals(2, groupIds.getInt(1)); + assertEquals(2, groupIds.getInt(2)); + assertEquals(1, groupIds.getInt(3)); + assertEquals(2, groupIds.getInt(4)); + assertEquals(1, groupIds.getInt(5)); + assertEquals(1, groupIds.getInt(6)); + if (withNull) { + assertEquals(0, groupIds.getInt(7)); + } } @Override @@ -158,11 +180,11 @@ public void close() { @Override public void add(int positionOffset, IntBlock groupIds) { assertEquals(groupIds.getPositionCount(), positions2); - assertEquals(0, groupIds.getInt(0)); - assertEquals(1, groupIds.getInt(1)); - assertEquals(0, groupIds.getInt(2)); - assertEquals(1, groupIds.getInt(3)); - assertEquals(2, groupIds.getInt(4)); + assertEquals(1, groupIds.getInt(0)); + assertEquals(2, groupIds.getInt(1)); + assertEquals(1, groupIds.getInt(2)); + assertEquals(2, groupIds.getInt(3)); + assertEquals(3, groupIds.getInt(4)); } @Override @@ -189,7 +211,11 @@ public void add(int positionOffset, IntBlock groupIds) { .map(groupIds::getInt) .boxed() .collect(Collectors.toSet()); - assertEquals(values, Set.of(0, 1)); + if (withNull) { + assertEquals(Set.of(0, 1, 2), values); + } else { + assertEquals(Set.of(1, 2), values); + } } @Override @@ -212,7 +238,7 @@ public void add(int positionOffset, IntBlock groupIds) { .collect(Collectors.toSet()); // The category IDs {0, 1, 2} should map to groups {0, 2, 3}, because // 0 matches an existing category (Connected to ...), and the others are new. 
- assertEquals(values, Set.of(0, 2, 3)); + assertEquals(Set.of(1, 3, 4), values); } @Override diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index 89d9026423204..547c430ed7518 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -1,5 +1,5 @@ standard aggs -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS count=COUNT(), @@ -17,7 +17,7 @@ count:long | sum:long | avg:double | count_distinct:long | category:keyw ; values aggs -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS values=MV_SORT(VALUES(message)), @@ -33,7 +33,7 @@ values:keyword | top ; mv -required_capability: categorize_v2 +required_capability: categorize_v3 FROM mv_sample_data | STATS COUNT(), SUM(event_duration) BY category=CATEGORIZE(message) @@ -48,7 +48,7 @@ COUNT():long | SUM(event_duration):long | category:keyword ; row mv -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = ["connected to a", "connected to b", "disconnected"], str = ["a", "b", "c"] | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message) @@ -61,7 +61,7 @@ COUNT():long | VALUES(str):keyword | category:keyword ; with multiple indices -required_capability: categorize_v2 +required_capability: categorize_v3 required_capability: union_types FROM sample_data* @@ -76,7 +76,7 @@ COUNT():long | category:keyword ; mv with many values -required_capability: categorize_v2 +required_capability: categorize_v3 FROM employees | STATS COUNT() BY category=CATEGORIZE(job_positions) @@ -92,24 +92,37 @@ COUNT():long | category:keyword 10 | .*?Head.+?Human.+?Resources.*? ; -# Throws when calling AbstractCategorizeBlockHash.seenGroupIds() - Requires nulls support? -mv with many values-Ignore -required_capability: categorize_v2 +mv with many values and SUM +required_capability: categorize_v3 FROM employees | STATS SUM(languages) BY category=CATEGORIZE(job_positions) - | SORT category DESC + | SORT category | LIMIT 3 ; -SUM(languages):integer | category:keyword - 43 | .*?Accountant.*? - 46 | .*?Architect.*? - 35 | .*?Business.+?Analyst.*? +SUM(languages):long | category:keyword + 43 | .*?Accountant.*? + 46 | .*?Architect.*? + 35 | .*?Business.+?Analyst.*? +; + +mv with many values and nulls and SUM +required_capability: categorize_v3 + +FROM employees + | STATS SUM(languages) BY category=CATEGORIZE(job_positions) + | SORT category DESC + | LIMIT 2 +; + +SUM(languages):long | category:keyword + 27 | null + 46 | .*?Tech.+?Lead.*? 
; mv via eval -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | EVAL message = MV_APPEND(message, "Banana") @@ -125,7 +138,7 @@ COUNT():long | category:keyword ; mv via eval const -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -139,7 +152,7 @@ COUNT():long | category:keyword ; mv via eval const without aliases -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -153,7 +166,7 @@ COUNT():long | CATEGORIZE(message):keyword ; mv const in parameter -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -166,7 +179,7 @@ COUNT():long | c:keyword ; agg alias shadowing -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS c = COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -181,7 +194,7 @@ c:keyword ; chained aggregations using categorize -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -196,7 +209,7 @@ COUNT():long | category:keyword ; stats without aggs -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS BY category=CATEGORIZE(message) @@ -210,7 +223,7 @@ category:keyword ; text field -required_capability: categorize_v2 +required_capability: categorize_v3 FROM hosts | STATS COUNT() BY category=CATEGORIZE(host_group) @@ -221,10 +234,11 @@ COUNT():long | category:keyword 2 | .*?DB.+?servers.*? 2 | .*?Gateway.+?instances.*? 5 | .*?Kubernetes.+?cluster.*? + 1 | null ; on TO_UPPER -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(TO_UPPER(message)) @@ -238,7 +252,7 @@ COUNT():long | category:keyword ; on CONCAT -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " banana")) @@ -252,7 +266,7 @@ COUNT():long | category:keyword ; on CONCAT with unicode -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " 👍🏽😊")) @@ -266,7 +280,7 @@ COUNT():long | category:keyword ; on REVERSE(CONCAT()) -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(REVERSE(CONCAT(message, " 👍🏽😊"))) @@ -280,7 +294,7 @@ COUNT():long | category:keyword ; and then TO_LOWER -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -294,9 +308,8 @@ COUNT():long | category:keyword 1 | .*?disconnected.*? ; -# Throws NPE - Requires nulls support -on const empty string-Ignore -required_capability: categorize_v2 +on const empty string +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE("") @@ -304,12 +317,11 @@ FROM sample_data ; COUNT():long | category:keyword - 7 | .*?.*? + 7 | null ; -# Throws NPE - Requires nulls support -on const empty string from eval-Ignore -required_capability: categorize_v2 +on const empty string from eval +required_capability: categorize_v3 FROM sample_data | EVAL x = "" @@ -318,26 +330,24 @@ FROM sample_data ; COUNT():long | category:keyword - 7 | .*?.*? 
+ 7 | null ; -# Doesn't give the correct results - Requires nulls support -on null-Ignore -required_capability: categorize_v2 +on null +required_capability: categorize_v3 FROM sample_data | EVAL x = null - | STATS COUNT() BY category=CATEGORIZE(x) + | STATS COUNT(), SUM(event_duration) BY category=CATEGORIZE(x) | SORT category ; -COUNT():long | category:keyword - 7 | null +COUNT():long | SUM(event_duration):long | category:keyword + 7 | 23231327 | null ; -# Doesn't give the correct results - Requires nulls support -on null string-Ignore -required_capability: categorize_v2 +on null string +required_capability: categorize_v3 FROM sample_data | EVAL x = null::string @@ -350,7 +360,7 @@ COUNT():long | category:keyword ; filtering out all data -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | WHERE @timestamp < "2023-10-23T00:00:00Z" @@ -362,7 +372,7 @@ COUNT():long | category:keyword ; filtering out all data with constant -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -373,7 +383,7 @@ COUNT():long | category:keyword ; drop output columns -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS count=COUNT() BY category=CATEGORIZE(message) @@ -388,7 +398,7 @@ x:integer ; category value processing -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = ["connected to a", "connected to b", "disconnected"] | STATS COUNT() BY category=CATEGORIZE(message) @@ -402,7 +412,7 @@ COUNT():long | category:keyword ; row aliases -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = "connected to a" | EVAL x = message @@ -416,7 +426,7 @@ COUNT():long | category:keyword | y:keyword ; from aliases -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | EVAL x = message @@ -432,7 +442,7 @@ COUNT():long | category:keyword | y:keyword ; row aliases with keep -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = "connected to a" | EVAL x = message @@ -448,7 +458,7 @@ COUNT():long | y:keyword ; from aliases with keep -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | EVAL x = message @@ -466,7 +476,7 @@ COUNT():long | y:keyword ; row rename -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = "connected to a" | RENAME message as x @@ -480,7 +490,7 @@ COUNT():long | y:keyword ; from rename -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | RENAME message as x @@ -496,7 +506,7 @@ COUNT():long | y:keyword ; row drop -required_capability: categorize_v2 +required_capability: categorize_v3 ROW message = "connected to a" | STATS c = COUNT() BY category=CATEGORIZE(message) @@ -509,7 +519,7 @@ c:long ; from drop -required_capability: categorize_v2 +required_capability: categorize_v3 FROM sample_data | STATS c = COUNT() BY category=CATEGORIZE(message) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 9bd4211855699..77a3e2840977f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -402,11 +402,8 @@ public enum Cap { /** 
* Supported the text categorization function "CATEGORIZE". - *
<p>
    - * This capability was initially named `CATEGORIZE`, and got renamed after the function started correctly returning keywords.
- * </p>
    */ - CATEGORIZE_V2(Build.current().isSnapshot()), + CATEGORIZE_V3(Build.current().isSnapshot()), /** * QSTR function diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 6074601535477..dd14e8dd82123 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1846,7 +1846,7 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1875,7 +1875,7 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); @@ -1890,7 +1890,7 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 8373528531902..e98f2b88b33c9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1212,7 +1212,7 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testCombineProjectionWithCategorizeGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); var plan = plan(""" from test @@ -3949,7 +3949,7 @@ public void testNestedExpressionsInGroups() { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testNestedExpressionsInGroupsWithCategorize() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V2.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); var plan = optimizedPlan(""" from test diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java index e4257270ce641..7fef6cdafa372 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/TokenListCategorizer.java @@ -115,6 +115,7 @@ public TokenListCategorizer( cacheRamUsage(0); } + @Nullable public TokenListCategory computeCategory(String s, CategorizationAnalyzer analyzer) { try (TokenStream ts = analyzer.tokenStream("text", s)) { return computeCategory(ts, s.length(), 1); @@ -123,6 +124,7 @@ public TokenListCategory computeCategory(String s, CategorizationAnalyzer analyz } } + @Nullable public TokenListCategory computeCategory(TokenStream ts, int unfilteredStringLen, long numDocs) throws IOException { assert partOfSpeechDictionary != null : "This version of computeCategory should only be used when a part-of-speech dictionary is available"; From 3c70cd081d40c36a5ac375b009932a0ce5eff1bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Thu, 28 Nov 2024 16:20:05 +0100 Subject: [PATCH 304/386] Revert "[CI] Ignore error about missing UBI artifact (#117506)" (#117704) This reverts commit 219372efaaf46a3b496df2142d3091d3434e67ec. This ignore is no longer necessary since the change to release-manager has been applied. --- .buildkite/scripts/dra-workflow.sh | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index bbfa81f51b286..f2dc40ca1927f 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -75,7 +75,6 @@ find "$WORKSPACE" -type d -path "*/build/distributions" -exec chmod a+w {} \; echo --- Running release-manager -set +e # Artifacts should be generated docker run --rm \ --name release-manager \ @@ -92,16 +91,4 @@ docker run --rm \ --version "$ES_VERSION" \ --artifact-set main \ --dependency "beats:https://artifacts-${WORKFLOW}.elastic.co/beats/${BEATS_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" \ - --dependency "ml-cpp:https://artifacts-${WORKFLOW}.elastic.co/ml-cpp/${ML_CPP_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" \ -2>&1 | tee release-manager.log -EXIT_CODE=$? 
-set -e - -# This failure is just generating a ton of noise right now, so let's just ignore it -# This should be removed once this issue has been fixed -if grep "elasticsearch-ubi-9.0.0-SNAPSHOT-docker-image.tar.gz" release-manager.log; then - echo "Ignoring error about missing ubi artifact" - exit 0 -fi - -exit "$EXIT_CODE" + --dependency "ml-cpp:https://artifacts-${WORKFLOW}.elastic.co/ml-cpp/${ML_CPP_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" From 54db9470207df11f07475a6e8d4837b29515a4d7 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 28 Nov 2024 07:33:35 -0800 Subject: [PATCH 305/386] Fix scaled_float test (#117662) --- .../index/mapper/extras/ScaledFloatFieldMapperTests.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java index dc9bc96f107a0..83fe07170d6e7 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java @@ -527,7 +527,13 @@ protected Number randomNumber() { public void testEncodeDecodeExactScalingFactor() { double v = randomValue(); - assertThat(encodeDecode(1 / v, v), equalTo(1 / v)); + double expected = 1 / v; + // We don't produce infinities while decoding. See #testDecodeHandlingInfinity(). + if (Double.isInfinite(expected)) { + var sign = expected == Double.POSITIVE_INFINITY ? 1 : -1; + expected = sign * Double.MAX_VALUE; + } + assertThat(encodeDecode(1 / v, v), equalTo(expected)); } /** From ab604ada78d779a18b82465d51829006540ce546 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Thu, 28 Nov 2024 16:34:57 +0100 Subject: [PATCH 306/386] [DOCS] Update tutorial example (#117538) --- .../full-text-filtering-tutorial.asciidoc | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc index fee4b797da724..a024305588cae 100644 --- a/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc +++ b/docs/reference/quickstart/full-text-filtering-tutorial.asciidoc @@ -511,8 +511,9 @@ In this tutorial scenario it's useful for when users have complex requirements f Let's create a query that addresses the following user needs: -* Must be a vegetarian main course +* Must be a vegetarian recipe * Should contain "curry" or "spicy" in the title or description +* Should be a main course * Must not be a dessert * Must have a rating of at least 4.5 * Should prefer recipes published in the last month @@ -524,16 +525,7 @@ GET /cooking_blog/_search "query": { "bool": { "must": [ - { - "term": { - "category.keyword": "Main Course" - } - }, - { - "term": { - "tags": "vegetarian" - } - }, + { "term": { "tags": "vegetarian" } }, { "range": { "rating": { @@ -543,10 +535,18 @@ GET /cooking_blog/_search } ], "should": [ + { + "term": { + "category": "Main Course" + } + }, { "multi_match": { "query": "curry spicy", - "fields": ["title^2", "description"] + "fields": [ + "title^2", + "description" + ] } }, { @@ -590,12 +590,12 @@ GET /cooking_blog/_search "value": 1, "relation": "eq" }, - "max_score": 7.9835095, + "max_score": 7.444513, "hits": [ { "_index": 
"cooking_blog", "_id": "2", - "_score": 7.9835095, + "_score": 7.444513, "_source": { "title": "Spicy Thai Green Curry: A Vegetarian Adventure", <1> "description": "Dive into the flavors of Thailand with this vibrant green curry. Packed with vegetables and aromatic herbs, this dish is both healthy and satisfying. Don't worry about the heat - you can easily adjust the spice level to your liking.", <2> @@ -619,8 +619,8 @@ GET /cooking_blog/_search <1> The title contains "Spicy" and "Curry", matching our should condition. With the default <> behavior, this field contributes most to the relevance score. <2> While the description also contains matching terms, only the best matching field's score is used by default. <3> The recipe was published within the last month, satisfying our recency preference. -<4> The "Main Course" category matches our `must` condition. -<5> The "vegetarian" tag satisfies another `must` condition, while "curry" and "spicy" tags align with our `should` preferences. +<4> The "Main Course" category satisfies another `should` condition. +<5> The "vegetarian" tag satisfies a `must` condition, while "curry" and "spicy" tags align with our `should` preferences. <6> The rating of 4.6 meets our minimum rating requirement of 4.5. ============== From f096c317c06052dc26c00b72448eda4743ab5965 Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Thu, 28 Nov 2024 19:38:37 +0200 Subject: [PATCH 307/386] fix/SearchStatesIt_failures (#117618) Investigate and unmute automatically muted tests --- docs/changelog/117618.yaml | 5 +++++ muted-tests.yml | 6 ------ 2 files changed, 5 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/117618.yaml diff --git a/docs/changelog/117618.yaml b/docs/changelog/117618.yaml new file mode 100644 index 0000000000000..5de29e2fe768c --- /dev/null +++ b/docs/changelog/117618.yaml @@ -0,0 +1,5 @@ +pr: 117618 +summary: SearchStatesIt failures reported by CI +area: Search +type: bug +issues: [116617, 116618] diff --git a/muted-tests.yml b/muted-tests.yml index fdadc747289bb..d703cfaa1b9aa 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -156,12 +156,6 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsCanMatchOnCoordinatorIntegTests method: testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange issue: https://github.com/elastic/elasticsearch/issues/116523 -- class: org.elasticsearch.upgrades.SearchStatesIT - method: testBWCSearchStates - issue: https://github.com/elastic/elasticsearch/issues/116617 -- class: org.elasticsearch.upgrades.SearchStatesIT - method: testCanMatch - issue: https://github.com/elastic/elasticsearch/issues/116618 - class: org.elasticsearch.reservedstate.service.RepositoriesFileSettingsIT method: testSettingsApplied issue: https://github.com/elastic/elasticsearch/issues/116694 From 8350ff29ba18c7d03d652b107532415705426da9 Mon Sep 17 00:00:00 2001 From: John Verwolf Date: Thu, 28 Nov 2024 13:25:02 -0800 Subject: [PATCH 308/386] Extensible Completion Postings Formats (#111494) Allows the Completion Postings Format to be extensible by providing an implementation of the CompletionsPostingsFormatExtension SPIs. 
--- docs/changelog/111494.yaml | 5 ++++ server/src/main/java/module-info.java | 6 +++- .../index/codec/PerFieldFormatSupplier.java | 24 ++++++++++++++-- .../index/mapper/CompletionFieldMapper.java | 5 ---- .../index/mapper/MappingLookup.java | 17 ----------- .../CompletionsPostingsFormatExtension.java | 28 +++++++++++++++++++ 6 files changed, 59 insertions(+), 26 deletions(-) create mode 100644 docs/changelog/111494.yaml create mode 100644 server/src/main/java/org/elasticsearch/internal/CompletionsPostingsFormatExtension.java diff --git a/docs/changelog/111494.yaml b/docs/changelog/111494.yaml new file mode 100644 index 0000000000000..6c7b84bb04798 --- /dev/null +++ b/docs/changelog/111494.yaml @@ -0,0 +1,5 @@ +pr: 111494 +summary: Extensible Completion Postings Formats +area: "Suggesters" +type: enhancement +issues: [] diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 63dbac3a72487..d572d3b90fec8 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.elasticsearch.internal.CompletionsPostingsFormatExtension; import org.elasticsearch.plugins.internal.RestExtension; /** The Elasticsearch Server Module. */ @@ -288,7 +289,8 @@ to org.elasticsearch.serverless.version, org.elasticsearch.serverless.buildinfo, - org.elasticsearch.serverless.constants; + org.elasticsearch.serverless.constants, + org.elasticsearch.serverless.codec; exports org.elasticsearch.lucene.analysis.miscellaneous; exports org.elasticsearch.lucene.grouping; exports org.elasticsearch.lucene.queries; @@ -395,6 +397,7 @@ org.elasticsearch.stateless, org.elasticsearch.settings.secure, org.elasticsearch.serverless.constants, + org.elasticsearch.serverless.codec, org.elasticsearch.serverless.apifiltering, org.elasticsearch.internal.security; @@ -414,6 +417,7 @@ uses org.elasticsearch.node.internal.TerminationHandlerProvider; uses org.elasticsearch.internal.VersionExtension; uses org.elasticsearch.internal.BuildExtension; + uses CompletionsPostingsFormatExtension; uses org.elasticsearch.features.FeatureSpecification; uses org.elasticsearch.plugins.internal.LoggingDataProvider; diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java index 9c2a08a69002c..4d3d37ab4f3af 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldFormatSupplier.java @@ -20,10 +20,15 @@ import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; import org.elasticsearch.index.codec.postings.ES812PostingsFormat; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.internal.CompletionsPostingsFormatExtension; +import org.elasticsearch.plugins.ExtensionLoader; + +import java.util.ServiceLoader; /** * Class that encapsulates the logic of figuring out the most appropriate file format for a given field, across postings, doc values and @@ -53,15 +58,28 @@ public PostingsFormat getPostingsFormatForField(String field) { private 
PostingsFormat internalGetPostingsFormatForField(String field) { if (mapperService != null) { - final PostingsFormat format = mapperService.mappingLookup().getPostingsFormat(field); - if (format != null) { - return format; + Mapper mapper = mapperService.mappingLookup().getMapper(field); + if (mapper instanceof CompletionFieldMapper) { + return PostingsFormatHolder.POSTINGS_FORMAT; } } // return our own posting format using PFOR return es812PostingsFormat; } + private static class PostingsFormatHolder { + private static final PostingsFormat POSTINGS_FORMAT = getPostingsFormat(); + + private static PostingsFormat getPostingsFormat() { + String defaultName = "Completion912"; // Caution: changing this name will result in exceptions if a field is created during a + // rolling upgrade and the new codec (specified by the name) is not available on all nodes in the cluster. + String codecName = ExtensionLoader.loadSingleton(ServiceLoader.load(CompletionsPostingsFormatExtension.class)) + .map(CompletionsPostingsFormatExtension::getFormatName) + .orElse(defaultName); + return PostingsFormat.forName(codecName); + } + } + boolean useBloomFilter(String field) { if (mapperService == null) { return false; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 53ccccdbd4bab..bb229c795a83e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.index.mapper; -import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; @@ -344,10 +343,6 @@ public CompletionFieldType fieldType() { return (CompletionFieldType) super.fieldType(); } - static PostingsFormat postingsFormat() { - return PostingsFormat.forName("Completion912"); - } - @Override public boolean parsesArrayValue() { return true; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 2f78e11761448..ce3f8cfb53184 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.codecs.PostingsFormat; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.index.IndexSettings; @@ -21,7 +20,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -58,7 +56,6 @@ private CacheKey() {} private final Map indexAnalyzersMap; private final List indexTimeScriptMappers; private final Mapping mapping; - private final Set completionFields; private final int totalFieldsCount; /** @@ -161,7 +158,6 @@ private MappingLookup( this.nestedLookup = NestedLookup.build(nestedMappers); final Map indexAnalyzersMap = new HashMap<>(); - final Set completionFields = new HashSet<>(); final List indexTimeScriptMappers = new ArrayList<>(); for (FieldMapper mapper : mappers) { if (objects.containsKey(mapper.fullPath())) { @@ -174,9 +170,6 @@ private MappingLookup( if (mapper.hasScript()) { 
indexTimeScriptMappers.add(mapper); } - if (mapper instanceof CompletionFieldMapper) { - completionFields.add(mapper.fullPath()); - } } for (FieldAliasMapper aliasMapper : aliasMappers) { @@ -211,7 +204,6 @@ private MappingLookup( this.objectMappers = Map.copyOf(objects); this.runtimeFieldMappersCount = runtimeFields.size(); this.indexAnalyzersMap = Map.copyOf(indexAnalyzersMap); - this.completionFields = Set.copyOf(completionFields); this.indexTimeScriptMappers = List.copyOf(indexTimeScriptMappers); runtimeFields.stream().flatMap(RuntimeField::asMappedFieldTypes).map(MappedFieldType::name).forEach(this::validateDoesNotShadow); @@ -285,15 +277,6 @@ public Iterable fieldMappers() { return fieldMappers.values(); } - /** - * Gets the postings format for a particular field - * @param field the field to retrieve a postings format for - * @return the postings format for the field, or {@code null} if the default format should be used - */ - public PostingsFormat getPostingsFormat(String field) { - return completionFields.contains(field) ? CompletionFieldMapper.postingsFormat() : null; - } - void checkLimits(IndexSettings settings) { checkFieldLimit(settings.getMappingTotalFieldsLimit()); checkObjectDepthLimit(settings.getMappingDepthLimit()); diff --git a/server/src/main/java/org/elasticsearch/internal/CompletionsPostingsFormatExtension.java b/server/src/main/java/org/elasticsearch/internal/CompletionsPostingsFormatExtension.java new file mode 100644 index 0000000000000..bb28d4dd6c901 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/internal/CompletionsPostingsFormatExtension.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.internal; + +import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; + +/** + * Allows plugging-in the Completions Postings Format. + */ +public interface CompletionsPostingsFormatExtension { + + /** + * Returns the name of the {@link CompletionPostingsFormat} that Elasticsearch should use. Should return null if the extension + * is not enabled. + *
+     * Note that the name must match a codec that is available on all nodes in the cluster, otherwise IndexCorruptionExceptions will occur.
+     * A feature can be used to protect against this scenario, or alternatively, the codec code can be rolled out prior to its usage by this
+     * extension.
+     */
+    String getFormatName();
+}

From 2895f1e900b2f41704fd507845102a281cff437e Mon Sep 17 00:00:00 2001
From: Ed Savage
Date: Fri, 29 Nov 2024 11:37:45 +1300
Subject: [PATCH 309/386] [ML] Remove deprecated sort from reindex operation
 (#117606)

Sort in reindex is deprecated. This PR removes its use from within the
reindexing step of dataframe analytics. Testing indicates that having the
destination index sorted is a "nice to have" and not necessary for the
DFA functionality to succeed.

---
 docs/changelog/117606.yaml                               | 5 +++++
 .../xpack/ml/dataframe/steps/ReindexingStep.java         | 3 ---
 2 files changed, 5 insertions(+), 3 deletions(-)
 create mode 100644 docs/changelog/117606.yaml

diff --git a/docs/changelog/117606.yaml b/docs/changelog/117606.yaml
new file mode 100644
index 0000000000000..ea61099a1a6b4
--- /dev/null
+++ b/docs/changelog/117606.yaml
@@ -0,0 +1,5 @@
+pr: 117606
+summary: Remove deprecated sort from reindex operation within dataframe analytics procedure
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java
index 0ccdd1eb64601..2a6d6eb329503 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java
@@ -27,13 +27,11 @@
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.IndexNotFoundException;
-import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.reindex.BulkByScrollResponse;
 import org.elasticsearch.index.reindex.BulkByScrollTask;
 import org.elasticsearch.index.reindex.ReindexAction;
 import org.elasticsearch.index.reindex.ReindexRequest;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskId;
@@ -147,7 +145,6 @@ protected void doExecute(ActionListener<Boolean> listener) {
         reindexRequest.setSourceQuery(config.getSource().getParsedQuery());
         reindexRequest.getSearchRequest().allowPartialSearchResults(false);
         reindexRequest.getSearchRequest().source().fetchSource(config.getSource().getSourceFiltering());
-        reindexRequest.getSearchRequest().source().sort(SeqNoFieldMapper.NAME, SortOrder.ASC);
         reindexRequest.setDestIndex(config.getDest().getIndex());
 
         // We explicitly set slices to 1 as we cannot parallelize in order to have the incremental id

From c35777a175f10a49ae860d28aa16b40d6f66c49a Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Fri, 29 Nov 2024 02:26:34 +0100
Subject: [PATCH 310/386] [Build] Declare mirror for eclipse p2 repository
 (#117732)

The Spotless plugin directly resolves dependencies from p2, which causes
`java.io.IOException: Failed to load eclipse jdt formatter` issues if
that repo is not accessible. This is a workaround for the eclipse p2
default repository being down, which caused all our CI jobs to fail.
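
For reference, the Spotless API used for this is withP2Mirrors, which swaps
p2 URL prefixes before the Eclipse JDT formatter artifacts are resolved. A
minimal sketch of the call in isolation (formatterConfigFile is a
placeholder; the mirror URL is the one this patch adopts):

    // Route p2 requests for download.eclipse.org through a mirror before
    // Spotless resolves the Eclipse JDT formatter.
    java.eclipse()
        .withP2Mirrors(Map.of("https://download.eclipse.org/", "https://mirror.umd.edu/eclipse/"))
        .configFile(formatterConfigFile);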
The artifacts in question we want to cache live in `~/.m2/repository`
---
 .../conventions/precommit/FormattingPrecommitPlugin.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
index ea9009172c7e2..41c0b4d67e1df 100644
--- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
+++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
@@ -17,6 +17,8 @@
 import org.gradle.api.Project;
 
 import java.io.File;
+import java.util.Arrays;
+import java.util.Map;
 
 /**
  * This plugin configures formatting for Java source using Spotless
@@ -64,7 +66,8 @@ public void apply(Project project) {
             java.importOrderFile(new File(elasticsearchWorkspace, importOrderPath));
 
             // Most formatting is done through the Eclipse formatter
-            java.eclipse().configFile(new File(elasticsearchWorkspace, formatterConfigPath));
+            java.eclipse().withP2Mirrors(Map.of("https://download.eclipse.org/", "https://mirror.umd.edu/eclipse/"))
+                .configFile(new File(elasticsearchWorkspace, formatterConfigPath));
 
             // Ensure blank lines are actually empty. Since formatters are applied in
             // order, apply this one last, otherwise non-empty blank lines can creep

From e54c7cf5edd4ffd24725412015b5d3db1e7ce5a4 Mon Sep 17 00:00:00 2001
From: David Roberts
Date: Fri, 29 Nov 2024 02:19:48 +0000
Subject: [PATCH 311/386] [ML] Disable machine learning on macOS x86_64
 (#104125)

As previously advised in #104087, machine learning functionality will no
longer be available on macOS x86_64. Machine learning functionality is
still available on macOS by using an arm64 machine (Apple silicon). It is
also possible to run Elasticsearch with machine learning functionality
within a Docker container on macOS x86_64.

This PR should be merged to main after the branch is split for the last
minor release scheduled for before December 2024. For example, suppose
8.17.0 is scheduled for release in November 2024 and 8.18.0 is scheduled
for release in January 2025. Then this PR should be merged to main after
the 8.17 branch is split.

Once this PR is merged, a follow-up PR should be opened against the
ml-cpp repo to remove the build system for darwin-x86_64. It has been
confirmed that with this change in place the Elasticsearch build system
works with an ml-cpp bundle that does not contain a platform/darwin-x86_64
directory. It still produces an Elasticsearch build that will run provided
xpack.ml.enabled is not explicitly set to true.

After the build system for darwin-x86_64 has been removed from the ml-cpp
repo, we will be able to do another PyTorch upgrade without having to
worry about tweaking the build system to work on Intel macOS.
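
For build or test code that needs to know up front whether ML can be
enabled, the new XPackSettings.ML_NATIVE_CODE_PLATFORMS constant introduced
below can be consulted directly; a hypothetical helper:

    // Hypothetical convenience check mirroring the new gate: true only on
    // platforms that ship the ML native code.
    static boolean mlIsSupportedOnThisPlatform() {
        return XPackSettings.ML_NATIVE_CODE_PLATFORMS.contains(Platforms.PLATFORM_NAME);
    }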
--------- Co-authored-by: Ed Savage Co-authored-by: Valeriy Khakhutskyy <1292899+valeriy42@users.noreply.github.com> --- docs/changelog/104125.yaml | 18 +++++++++++++++ .../xpack/core/XPackSettings.java | 22 +++++++++++++++++-- .../xpack/ml/MachineLearning.java | 11 ---------- 3 files changed, 38 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/104125.yaml diff --git a/docs/changelog/104125.yaml b/docs/changelog/104125.yaml new file mode 100644 index 0000000000000..e5c5ea6a3f1cd --- /dev/null +++ b/docs/changelog/104125.yaml @@ -0,0 +1,18 @@ +pr: 104125 +summary: Disable machine learning on macOS x86_64 +area: Machine Learning +type: breaking +issues: [] +breaking: + title: Disable machine learning on macOS x86_64 + area: Packaging + details: The machine learning plugin is permanently disabled on macOS x86_64. + For the last three years Apple has been selling hardware based on the arm64 + architecture, and support will increasingly focus on this architecture in + the future. Changes to upstream dependencies of Elastic's machine learning + functionality have made it unviable for Elastic to continue to build machine + learning on macOS x86_64. + impact: To continue to use machine learning functionality on macOS please switch to + an arm64 machine (Apple silicon). Alternatively, it will still be possible to run + Elasticsearch with machine learning enabled in a Docker container on macOS x86_64. + notable: false diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 72e8805e96fc4..6aef618288fd2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -7,12 +7,16 @@ package org.elasticsearch.xpack.core; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.ssl.SslClientAuthenticationMode; import org.elasticsearch.common.ssl.SslVerificationMode; import org.elasticsearch.core.Strings; +import org.elasticsearch.plugins.Platforms; import org.elasticsearch.transport.RemoteClusterPortSettings; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -26,6 +30,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.function.Function; import javax.crypto.SecretKeyFactory; @@ -40,6 +45,8 @@ */ public class XPackSettings { + private static final Logger logger = LogManager.getLogger(XPackSettings.class); + private XPackSettings() { throw new IllegalStateException("Utility class should not be instantiated"); } @@ -76,10 +83,21 @@ public Iterator> settings() { /** Setting for enabling or disabling graph. Defaults to true. */ public static final Setting GRAPH_ENABLED = Setting.boolSetting("xpack.graph.enabled", true, Setting.Property.NodeScope); - /** Setting for enabling or disabling machine learning. Defaults to true. */ + public static final Set ML_NATIVE_CODE_PLATFORMS = Set.of("darwin-aarch64", "linux-aarch64", "linux-x86_64", "windows-x86_64"); + + /** Setting for enabling or disabling machine learning. 
Defaults to true on platforms that have the ML native code available. */ public static final Setting MACHINE_LEARNING_ENABLED = Setting.boolSetting( "xpack.ml.enabled", - true, + ML_NATIVE_CODE_PLATFORMS.contains(Platforms.PLATFORM_NAME), + enabled -> { + if (enabled && ML_NATIVE_CODE_PLATFORMS.contains(Platforms.PLATFORM_NAME) == false) { + SettingsException e = new SettingsException("xpack.ml.enabled cannot be set to [true] on [{}]", Platforms.PLATFORM_NAME); + // The exception doesn't get logged nicely on the console because it's thrown during initial plugin loading, + // so log separately here to make absolutely clear what happened + logger.fatal(e.getMessage()); + throw e; + } + }, Setting.Property.NodeScope ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 1feb95661f33a..8363e0f5c19a1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -69,7 +68,6 @@ import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; -import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; @@ -931,15 +929,6 @@ public Collection createComponents(PluginServices services) { return List.of(new JobManagerHolder(), new MachineLearningExtensionHolder()); } - if ("darwin-x86_64".equals(Platforms.PLATFORM_NAME)) { - String msg = "The machine learning plugin will be permanently disabled on macOS x86_64 in new minor versions released " - + "from December 2024 onwards. To continue to use machine learning functionality on macOS please switch to an arm64 " - + "machine (Apple silicon). 
Alternatively, it will still be possible to run Elasticsearch with machine learning " - + "enabled in a Docker container on macOS x86_64."; - logger.warn(msg); - deprecationLogger.warn(DeprecationCategory.PLUGINS, "ml-darwin-x86_64", msg); - } - machineLearningExtension.get().configure(environment.settings()); this.mlUpgradeModeActionFilter.set(new MlUpgradeModeActionFilter(clusterService)); From 56637285a8f2bacc88a12c7824b8b88d06752b07 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 29 Nov 2024 13:47:40 +1100 Subject: [PATCH 312/386] Implement CAS support in Azure test fixture (#117104) Closes ES-5680 --- .../azure/AzureBlobStoreRepositoryTests.java | 8 +- .../AzureStorageCleanupThirdPartyTests.java | 4 +- .../azure/AzureBlobContainer.java | 2 +- .../repositories/azure/AzureBlobStore.java | 26 +- .../azure/AzureBlobContainerStatsTests.java | 3 +- .../RepositoryAzureClientYamlTestSuiteIT.java | 4 +- .../test/repository_azure/20_repository.yml | 14 + .../java/fixture/azure/AzureHttpFixture.java | 15 +- .../java/fixture/azure/AzureHttpHandler.java | 333 ++++++++---- .../fixture/azure/MockAzureBlobStore.java | 484 ++++++++++++++++++ .../azure/AzureRepositoriesMeteringIT.java | 4 +- .../AzureSearchableSnapshotsIT.java | 4 +- .../AzureSnapshotBasedRecoveryIT.java | 4 +- .../AzureRepositoryAnalysisRestIT.java | 12 +- 14 files changed, 800 insertions(+), 117 deletions(-) create mode 100644 test/fixtures/azure-fixture/src/main/java/fixture/azure/MockAzureBlobStore.java diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index bd21f208faac4..3fa4f7de7e717 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.azure; import fixture.azure.AzureHttpHandler; +import fixture.azure.MockAzureBlobStore; import com.azure.storage.common.policy.RequestRetryOptions; import com.azure.storage.common.policy.RetryPolicyType; @@ -184,7 +185,12 @@ long getUploadBlockSize() { @SuppressForbidden(reason = "this test uses a HttpHandler to emulate an Azure endpoint") private static class AzureBlobStoreHttpHandler extends AzureHttpHandler implements BlobStoreHttpHandler { AzureBlobStoreHttpHandler(final String account, final String container) { - super(account, container, null /* no auth header validation - sometimes it's omitted in these tests (TODO why?) */); + super( + account, + container, + null /* no auth header validation - sometimes it's omitted in these tests (TODO why?) 
*/, + MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE + ); } } diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 6d5c17c392141..40be0f8ca78c4 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.azure; import fixture.azure.AzureHttpFixture; +import fixture.azure.MockAzureBlobStore; import com.azure.core.exception.HttpResponseException; import com.azure.storage.blob.BlobContainerClient; @@ -60,7 +61,8 @@ public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyReposi System.getProperty("test.azure.container"), System.getProperty("test.azure.tenant_id"), System.getProperty("test.azure.client_id"), - AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_ACCOUNT) + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_ACCOUNT), + MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE ); @Override diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index 52bc1ee1399d4..73936d82fc204 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -180,7 +180,7 @@ protected String buildKey(String blobName) { } private boolean skipRegisterOperation(ActionListener listener) { - return skipCas(listener) || skipIfNotPrimaryOnlyLocationMode(listener); + return skipIfNotPrimaryOnlyLocationMode(listener); } private boolean skipIfNotPrimaryOnlyLocationMode(ActionListener listener) { diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 3c64bb9f3b830..b4567a92184fc 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -40,6 +40,7 @@ import com.azure.storage.blob.models.ListBlobsOptions; import com.azure.storage.blob.options.BlobParallelUploadOptions; import com.azure.storage.blob.options.BlockBlobSimpleUploadOptions; +import com.azure.storage.blob.specialized.BlobLeaseClient; import com.azure.storage.blob.specialized.BlobLeaseClientBuilder; import com.azure.storage.blob.specialized.BlockBlobAsyncClient; @@ -1010,7 +1011,7 @@ private static BytesReference innerCompareAndExchangeRegister( } return currentValue; } finally { - leaseClient.releaseLease(); + bestEffortRelease(leaseClient); } } else { if (expected.length() == 0) { @@ -1020,6 +1021,29 @@ private static BytesReference innerCompareAndExchangeRegister( } } + /** + * Release the lease, ignoring conflicts due to expiry + * + * @see Outcomes of lease operations by lease state + * @param leaseClient The client for the lease + */ + private static void bestEffortRelease(BlobLeaseClient leaseClient) { + 
try { + leaseClient.releaseLease(); + } catch (BlobStorageException blobStorageException) { + if (blobStorageException.getStatusCode() == RestStatus.CONFLICT.getStatus()) { + // This is OK, we tried to release a lease that was expired/re-acquired + logger.debug( + "Ignored conflict on release: errorCode={}, message={}", + blobStorageException.getErrorCode(), + blobStorageException.getMessage() + ); + } else { + throw blobStorageException; + } + } + } + private static BytesReference downloadRegisterBlob( String containerPath, String blobKey, diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java index 6730e5c3c81bd..812d519e60260 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.azure; import fixture.azure.AzureHttpHandler; +import fixture.azure.MockAzureBlobStore; import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; @@ -26,7 +27,7 @@ public class AzureBlobContainerStatsTests extends AbstractAzureServerTestCase { @SuppressForbidden(reason = "use a http server") @Before public void configureAzureHandler() { - httpServer.createContext("/", new AzureHttpHandler(ACCOUNT, CONTAINER, null)); + httpServer.createContext("/", new AzureHttpHandler(ACCOUNT, CONTAINER, null, MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE)); } public void testOperationPurposeIsReflectedInBlobStoreStats() throws IOException { diff --git a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index 64dde0248ad2c..b24574da36825 100644 --- a/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java +++ b/modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.azure; import fixture.azure.AzureHttpFixture; +import fixture.azure.MockAzureBlobStore; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; @@ -47,7 +48,8 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC AZURE_TEST_CONTAINER, AZURE_TEST_TENANT_ID, AZURE_TEST_CLIENT_ID, - decideAuthHeaderPredicate() + decideAuthHeaderPredicate(), + MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE ); private static Predicate decideAuthHeaderPredicate() { diff --git a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml index a4a7d0b22a0ed..968e93cf9fc55 100644 --- a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml +++ b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml @@ -193,6 +193,20 @@ setup: container: 
zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE client: integration_test +--- +"Register a read-only repository with a non existing container": + + - do: + catch: /repository_verification_exception/ + snapshot.create_repository: + repository: repository + body: + type: azure + settings: + container: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test + readonly: true + --- "Register a repository with a non existing client": diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java index 39105e0a27dc9..ab4d54f4fc451 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpFixture.java @@ -45,6 +45,7 @@ public class AzureHttpFixture extends ExternalResource { private final String clientId; private final String tenantId; private final Predicate authHeaderPredicate; + private final MockAzureBlobStore.LeaseExpiryPredicate leaseExpiryPredicate; private HttpServer server; private HttpServer metadataServer; @@ -116,7 +117,8 @@ public AzureHttpFixture( String container, @Nullable String rawTenantId, @Nullable String rawClientId, - Predicate authHeaderPredicate + Predicate authHeaderPredicate, + MockAzureBlobStore.LeaseExpiryPredicate leaseExpiryPredicate ) { final var tenantId = Strings.hasText(rawTenantId) ? rawTenantId : null; final var clientId = Strings.hasText(rawClientId) ? rawClientId : null; @@ -135,6 +137,7 @@ public AzureHttpFixture( this.tenantId = tenantId; this.clientId = clientId; this.authHeaderPredicate = authHeaderPredicate; + this.leaseExpiryPredicate = leaseExpiryPredicate; } private String scheme() { @@ -193,7 +196,10 @@ protected void before() { } case HTTP -> { server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); - server.createContext("/" + account, new AzureHttpHandler(account, container, actualAuthHeaderPredicate)); + server.createContext( + "/" + account, + new AzureHttpHandler(account, container, actualAuthHeaderPredicate, leaseExpiryPredicate) + ); server.start(); oauthTokenServiceServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); @@ -222,7 +228,10 @@ protected void before() { final var httpsServer = HttpsServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); this.server = httpsServer; httpsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext)); - httpsServer.createContext("/" + account, new AzureHttpHandler(account, container, actualAuthHeaderPredicate)); + httpsServer.createContext( + "/" + account, + new AzureHttpHandler(account, container, actualAuthHeaderPredicate, leaseExpiryPredicate) + ); httpsServer.start(); } { diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java index bbcfe1f75dc06..904f4581ad2c9 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java @@ -15,7 +15,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import 
org.elasticsearch.common.regex.Regex; @@ -27,7 +26,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; @@ -43,11 +41,11 @@ import java.util.Objects; import java.util.Set; import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static fixture.azure.MockAzureBlobStore.failTestWithAssertionError; import static org.elasticsearch.repositories.azure.AzureFixtureHelper.assertValidBlockId; /** @@ -56,17 +54,29 @@ @SuppressForbidden(reason = "Uses a HttpServer to emulate an Azure endpoint") public class AzureHttpHandler implements HttpHandler { private static final Logger logger = LogManager.getLogger(AzureHttpHandler.class); + private static final Pattern RANGE_HEADER_PATTERN = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$"); + static final String X_MS_LEASE_ID = "x-ms-lease-id"; + static final String X_MS_PROPOSED_LEASE_ID = "x-ms-proposed-lease-id"; + static final String X_MS_LEASE_DURATION = "x-ms-lease-duration"; + static final String X_MS_LEASE_BREAK_PERIOD = "x-ms-lease-break-period"; + static final String X_MS_BLOB_TYPE = "x-ms-blob-type"; + static final String X_MS_BLOB_CONTENT_LENGTH = "x-ms-blob-content-length"; - private final Map blobs; private final String account; private final String container; private final Predicate authHeaderPredicate; - - public AzureHttpHandler(final String account, final String container, @Nullable Predicate authHeaderPredicate) { + private final MockAzureBlobStore mockAzureBlobStore; + + public AzureHttpHandler( + final String account, + final String container, + @Nullable Predicate authHeaderPredicate, + MockAzureBlobStore.LeaseExpiryPredicate leaseExpiryPredicate + ) { this.account = Objects.requireNonNull(account); this.container = Objects.requireNonNull(container); this.authHeaderPredicate = authHeaderPredicate; - this.blobs = new ConcurrentHashMap<>(); + this.mockAzureBlobStore = new MockAzureBlobStore(leaseExpiryPredicate); } private static List getAuthHeader(HttpExchange exchange) { @@ -134,7 +144,7 @@ public void handle(final HttpExchange exchange) throws IOException { final String blockId = params.get("blockid"); assert assertValidBlockId(blockId); - blobs.put(blockId, Streams.readFully(exchange.getRequestBody())); + mockAzureBlobStore.putBlock(blobPath(exchange), blockId, Streams.readFully(exchange.getRequestBody()), leaseId(exchange)); exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); } else if (Regex.simpleMatch("PUT /" + account + "/" + container + "/*comp=blocklist*", request)) { @@ -145,83 +155,124 @@ public void handle(final HttpExchange exchange) throws IOException { .map(line -> line.substring(0, line.indexOf(""))) .toList(); - final ByteArrayOutputStream blob = new ByteArrayOutputStream(); - for (String blockId : blockIds) { - BytesReference block = blobs.remove(blockId); - assert block != null; - block.writeTo(blob); - } - blobs.put(exchange.getRequestURI().getPath(), new BytesArray(blob.toByteArray())); + mockAzureBlobStore.putBlockList(blobPath(exchange), blockIds, leaseId(exchange)); exchange.getResponseHeaders().add("x-ms-request-server-encrypted", "false"); exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + } else if (Regex.simpleMatch("PUT /" + account + "/" + container + "*comp=lease*", request)) { + // Lease 
Blob (https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob) + final String leaseAction = requireHeader(exchange, "x-ms-lease-action"); + + switch (leaseAction) { + case "acquire" -> { + final int leaseDurationSeconds = requireIntegerHeader(exchange, X_MS_LEASE_DURATION); + final String proposedLeaseId = exchange.getRequestHeaders().getFirst(X_MS_PROPOSED_LEASE_ID); + final String newLeaseId = mockAzureBlobStore.acquireLease( + blobPath(exchange), + leaseDurationSeconds, + proposedLeaseId + ); + exchange.getResponseHeaders().set(X_MS_LEASE_ID, newLeaseId); + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + } + case "release" -> { + final String leaseId = requireHeader(exchange, X_MS_LEASE_ID); + mockAzureBlobStore.releaseLease(blobPath(exchange), leaseId); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); + } + case "break" -> { + mockAzureBlobStore.breakLease(blobPath(exchange), getOptionalIntegerHeader(exchange, X_MS_LEASE_BREAK_PERIOD)); + exchange.sendResponseHeaders(RestStatus.ACCEPTED.getStatus(), -1); + } + case "renew", "change" -> { + failTestWithAssertionError("Attempt was made to use not-implemented lease action: " + leaseAction); + throw new MockAzureBlobStore.AzureBlobStoreError( + RestStatus.NOT_IMPLEMENTED, + "NotImplemented", + "Attempted to use unsupported lease API: " + leaseAction + ); + } + default -> { + failTestWithAssertionError("Unrecognized lease action: " + leaseAction); + throw new MockAzureBlobStore.BadRequestException( + "InvalidHeaderValue", + "Invalid x-ms-lease-action header: " + leaseAction + ); + } + } } else if (Regex.simpleMatch("PUT /" + account + "/" + container + "/*", request)) { // PUT Blob (see https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob) + final String blobType = requireHeader(exchange, X_MS_BLOB_TYPE); final String ifNoneMatch = exchange.getRequestHeaders().getFirst("If-None-Match"); - if ("*".equals(ifNoneMatch)) { - if (blobs.putIfAbsent(exchange.getRequestURI().getPath(), Streams.readFully(exchange.getRequestBody())) != null) { - sendError(exchange, RestStatus.CONFLICT); - return; - } - } else { - blobs.put(exchange.getRequestURI().getPath(), Streams.readFully(exchange.getRequestBody())); - } + mockAzureBlobStore.putBlob( + blobPath(exchange), + Streams.readFully(exchange.getRequestBody()), + blobType, + ifNoneMatch, + leaseId(exchange) + ); exchange.getResponseHeaders().add("x-ms-request-server-encrypted", "false"); exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); } else if (Regex.simpleMatch("HEAD /" + account + "/" + container + "/*", request)) { // Get Blob Properties (see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties) - final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); - if (blob == null) { - sendError(exchange, RestStatus.NOT_FOUND); - return; - } - exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(blob.length())); - exchange.getResponseHeaders().add("Content-Length", String.valueOf(blob.length())); - exchange.getResponseHeaders().add("x-ms-blob-type", "BlockBlob"); + final MockAzureBlobStore.AzureBlockBlob blob = mockAzureBlobStore.getBlob(blobPath(exchange), leaseId(exchange)); + + final Headers responseHeaders = exchange.getResponseHeaders(); + final BytesReference blobContents = blob.getContents(); + responseHeaders.add(X_MS_BLOB_CONTENT_LENGTH, String.valueOf(blobContents.length())); + responseHeaders.add("Content-Length", String.valueOf(blobContents.length())); + 
responseHeaders.add(X_MS_BLOB_TYPE, blob.type()); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); } else if (Regex.simpleMatch("GET /" + account + "/" + container + "/*", request)) { - // GET Object (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html) - final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); - if (blob == null) { - sendError(exchange, RestStatus.NOT_FOUND); - return; - } + // Get Blob (https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob) + final MockAzureBlobStore.AzureBlockBlob blob = mockAzureBlobStore.getBlob(blobPath(exchange), leaseId(exchange)); + final BytesReference responseContent; + final RestStatus successStatus; // see Constants.HeaderConstants.STORAGE_RANGE_HEADER final String range = exchange.getRequestHeaders().getFirst("x-ms-range"); - final Matcher matcher = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$").matcher(range); - if (matcher.matches() == false) { - throw new AssertionError("Range header does not match expected format: " + range); - } + if (range != null) { + final Matcher matcher = RANGE_HEADER_PATTERN.matcher(range); + if (matcher.matches() == false) { + throw new MockAzureBlobStore.BadRequestException( + "InvalidHeaderValue", + "Range header does not match expected format: " + range + ); + } - final long start = Long.parseLong(matcher.group(1)); - final long end = Long.parseLong(matcher.group(2)); + final long start = Long.parseLong(matcher.group(1)); + final long end = Long.parseLong(matcher.group(2)); - if (blob.length() <= start) { - exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); - exchange.sendResponseHeaders(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus(), -1); - return; - } + final BytesReference blobContents = blob.getContents(); + if (blobContents.length() <= start) { + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.sendResponseHeaders(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus(), -1); + return; + } - var responseBlob = blob.slice(Math.toIntExact(start), Math.toIntExact(Math.min(end - start + 1, blob.length() - start))); + responseContent = blobContents.slice( + Math.toIntExact(start), + Math.toIntExact(Math.min(end - start + 1, blobContents.length() - start)) + ); + successStatus = RestStatus.PARTIAL_CONTENT; + } else { + responseContent = blob.getContents(); + successStatus = RestStatus.OK; + } exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); - exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(responseBlob.length())); - exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.getResponseHeaders().add(X_MS_BLOB_CONTENT_LENGTH, String.valueOf(responseContent.length())); + exchange.getResponseHeaders().add(X_MS_BLOB_TYPE, blob.type()); exchange.getResponseHeaders().add("ETag", "\"blockblob\""); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), responseBlob.length()); - responseBlob.writeTo(exchange.getResponseBody()); + exchange.sendResponseHeaders(successStatus.getStatus(), responseContent.length() == 0 ? 
-1 : responseContent.length()); + responseContent.writeTo(exchange.getResponseBody()); } else if (Regex.simpleMatch("DELETE /" + account + "/" + container + "/*", request)) { // Delete Blob (https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob) - final boolean deleted = blobs.entrySet().removeIf(blob -> blob.getKey().startsWith(exchange.getRequestURI().getPath())); - if (deleted) { - exchange.sendResponseHeaders(RestStatus.ACCEPTED.getStatus(), -1); - } else { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); - } + mockAzureBlobStore.deleteBlob(blobPath(exchange), leaseId(exchange)); + exchange.sendResponseHeaders(RestStatus.ACCEPTED.getStatus(), -1); } else if (Regex.simpleMatch("GET /" + account + "/" + container + "?*restype=container*comp=list*", request)) { // List Blobs (https://docs.microsoft.com/en-us/rest/api/storageservices/list-blobs) @@ -239,11 +290,12 @@ public void handle(final HttpExchange exchange) throws IOException { list.append("").append(delimiter).append(""); } list.append(""); - for (Map.Entry blob : blobs.entrySet()) { - if (prefix != null && blob.getKey().startsWith("/" + account + "/" + container + "/" + prefix) == false) { - continue; - } - String blobPath = blob.getKey().replace("/" + account + "/" + container + "/", ""); + final Map matchingBlobs = mockAzureBlobStore.listBlobs( + prefix, + leaseId(exchange) + ); + for (Map.Entry blob : matchingBlobs.entrySet()) { + final String blobPath = blob.getKey(); if (delimiter != null) { int fromIndex = (prefix != null ? prefix.length() : 0); int delimiterPosition = blobPath.indexOf(delimiter, fromIndex); @@ -259,7 +311,7 @@ public void handle(final HttpExchange exchange) throws IOException { %s BlockBlob - """, blobPath, blob.getValue().length())); + """, blobPath, blob.getValue().getContents().length())); } if (blobPrefixes.isEmpty() == false) { blobPrefixes.forEach(p -> list.append("").append(p).append("")); @@ -294,7 +346,8 @@ public void handle(final HttpExchange exchange) throws IOException { } // Process the deletion - if (blobs.remove("/" + account + toDelete) != null) { + try { + mockAzureBlobStore.deleteBlob(toDelete, leaseId(exchange)); final String acceptedPart = Strings.format(""" --%s Content-Type: application/http @@ -307,32 +360,43 @@ public void handle(final HttpExchange exchange) throws IOException { """, responseBoundary, contentId, requestId).replaceAll("\n", "\r\n"); response.append(acceptedPart); - } else { - final String notFoundBody = Strings.format( + } catch (MockAzureBlobStore.AzureBlobStoreError e) { + final String errorResponseBody = Strings.format( """ - BlobNotFoundThe specified blob does not exist. + %s%s RequestId:%s Time:%s""", + e.getErrorCode(), + e.getMessage(), requestId, DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("UTC"))) ); - final String notFoundPart = Strings.format(""" - --%s - Content-Type: application/http - Content-ID: %s - - HTTP/1.1 404 The specified blob does not exist. 
- x-ms-error-code: BlobNotFound - x-ms-request-id: %s - x-ms-version: 2018-11-09 - Content-Length: %d - Content-Type: application/xml - - %s - """, responseBoundary, contentId, requestId, notFoundBody.length(), notFoundBody) - .replaceAll("\n", "\r\n"); - response.append(notFoundPart); + final String errorResponsePart = Strings.format( + """ + --%s + Content-Type: application/http + Content-ID: %s + + HTTP/1.1 %s %s + x-ms-error-code: %s + x-ms-request-id: %s + x-ms-version: 2018-11-09 + Content-Length: %d + Content-Type: application/xml + + %s + """, + responseBoundary, + contentId, + e.getRestStatus().getStatus(), + e.getMessage(), + e.getErrorCode(), + requestId, + errorResponseBody.length(), + errorResponseBody + ).replaceAll("\n", "\r\n"); + response.append(errorResponsePart); } // Clear the state @@ -350,19 +414,18 @@ public void handle(final HttpExchange exchange) throws IOException { } contentId = line.split("\\s")[1]; } else if (Regex.simpleMatch("DELETE /" + container + "/*", line)) { - String blobName = RestUtils.decodeComponent(line.split("(\\s|\\?)")[1]); + final String path = RestUtils.decodeComponent(line.split("(\\s|\\?)")[1]); if (toDelete != null) { throw new IllegalStateException("Got multiple deletes in a single request?"); } - toDelete = blobName; + toDelete = stripPrefix("/" + container + "/", path); } else if (Regex.simpleMatch("DELETE /" + account + "/" + container + "/*", line)) { // possible alternative DELETE url, depending on which method is used in the batch client String path = RestUtils.decodeComponent(line.split("(\\s|\\?)")[1]); - String blobName = path.split(account)[1]; if (toDelete != null) { throw new IllegalStateException("Got multiple deletes in a single request?"); } - toDelete = blobName; + toDelete = stripPrefix("/" + account + "/" + container + "/", path); } } response.append("--").append(responseBoundary).append("--\r\n0\r\n"); @@ -372,20 +435,90 @@ public void handle(final HttpExchange exchange) throws IOException { logger.debug("--> Sending response:\n{}", response); exchange.getResponseBody().write(response.toString().getBytes(StandardCharsets.UTF_8)); } - } else { - logger.warn("--> Unrecognised request received: {}", request); - sendError(exchange, RestStatus.BAD_REQUEST); - } + } else if (Regex.simpleMatch("PUT /*/*/*master.dat", request) + && Regex.simpleMatch("PUT /" + account + "/" + container + "*", request) == false) { + // An attempt to put master.dat to a different container. This is probably + // org.elasticsearch.repositories.blobstore.BlobStoreRepository#startVerification + throw new MockAzureBlobStore.AzureBlobStoreError( + RestStatus.NOT_FOUND, + "ContainerNotFound", + "The specified container does not exist." + ); + } else if (Regex.simpleMatch("GET /*/*restype=container*comp=list*", request) + && Regex.simpleMatch("GET /" + account + "/" + container + "*", request) == false) { + // An attempt to list the contents of a different container. This is probably + // org.elasticsearch.repositories.blobstore.BlobStoreRepository#startVerification for a read-only + // repository + throw new MockAzureBlobStore.AzureBlobStoreError( + RestStatus.NOT_FOUND, + "ContainerNotFound", + "The specified container does not exist." 
+ ); + } else { + final String message = "You sent a request that is not supported by AzureHttpHandler: " + request; + failTestWithAssertionError(message); + throw new MockAzureBlobStore.BadRequestException("UnrecognisedRequest", message); + } + } catch (MockAzureBlobStore.AzureBlobStoreError e) { + sendError(exchange, e); + } catch (Exception e) { + failTestWithAssertionError("Uncaught exception", e); + sendError(exchange, RestStatus.INTERNAL_SERVER_ERROR, "InternalError", e.getMessage()); } finally { exchange.close(); } } + private String requireHeader(HttpExchange exchange, String headerName) { + final String headerValue = exchange.getRequestHeaders().getFirst(headerName); + if (headerValue == null) { + throw new MockAzureBlobStore.BadRequestException("MissingRequiredHeader", "Missing " + headerName + " header"); + } + return headerValue; + } + + private int requireIntegerHeader(HttpExchange exchange, String headerName) { + final String headerValue = requireHeader(exchange, headerName); + try { + return Integer.parseInt(headerValue); + } catch (NumberFormatException e) { + throw new MockAzureBlobStore.BadRequestException("InvalidHeaderValue", "Invalid " + headerName + " header"); + } + } + + @Nullable + private Integer getOptionalIntegerHeader(HttpExchange exchange, String headerName) { + final String headerValue = exchange.getRequestHeaders().getFirst(headerName); + try { + return headerValue == null ? null : Integer.parseInt(headerValue); + } catch (NumberFormatException e) { + throw new MockAzureBlobStore.BadRequestException("InvalidHeaderValue", "Invalid " + headerName + " header"); + } + } + + @Nullable + private String leaseId(HttpExchange exchange) { + return exchange.getRequestHeaders().getFirst(X_MS_LEASE_ID); + } + + private String blobPath(HttpExchange exchange) { + return stripPrefix("/" + account + "/" + container + "/", exchange.getRequestURI().getPath()); + } + public Map blobs() { - return blobs; + return mockAzureBlobStore.blobs(); + } + + public static void sendError(HttpExchange exchange, MockAzureBlobStore.AzureBlobStoreError error) throws IOException { + sendError(exchange, error.getRestStatus(), error.getErrorCode(), error.getMessage()); } public static void sendError(final HttpExchange exchange, final RestStatus status) throws IOException { + final String errorCode = toAzureErrorCode(status); + sendError(exchange, status, errorCode, status.toString()); + } + + public static void sendError(HttpExchange exchange, RestStatus restStatus, String errorCode, String errorMessage) throws IOException { final Headers headers = exchange.getResponseHeaders(); headers.add("Content-Type", "application/xml"); @@ -396,20 +529,19 @@ public static void sendError(final HttpExchange exchange, final RestStatus statu headers.add("x-ms-request-id", requestId); } - final String errorCode = toAzureErrorCode(status); // see Constants.HeaderConstants.ERROR_CODE headers.add("x-ms-error-code", errorCode); if ("HEAD".equals(exchange.getRequestMethod())) { - exchange.sendResponseHeaders(status.getStatus(), -1L); + exchange.sendResponseHeaders(restStatus.getStatus(), -1L); } else { final byte[] response = (String.format(Locale.ROOT, """ %s %s - """, errorCode, status)).getBytes(StandardCharsets.UTF_8); - exchange.sendResponseHeaders(status.getStatus(), response.length); + """, errorCode, errorMessage)).getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(restStatus.getStatus(), response.length); exchange.getResponseBody().write(response); } } @@ -428,4 +560,9 @@ private static String 
toAzureErrorCode(final RestStatus status) {
         );
     };
 }
+
+    private String stripPrefix(String prefix, String toStrip) {
+        assert toStrip.startsWith(prefix);
+        return toStrip.substring(prefix.length());
+    }
 }
diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/MockAzureBlobStore.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/MockAzureBlobStore.java
new file mode 100644
index 0000000000000..c694c27c1293b
--- /dev/null
+++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/MockAzureBlobStore.java
@@ -0,0 +1,484 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package fixture.azure;
+
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.bytes.CompositeBytesReference;
+import org.elasticsearch.common.util.Maps;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
+import org.elasticsearch.rest.RestStatus;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+
+public class MockAzureBlobStore {
+
+    private static final Logger logger = LogManager.getLogger(MockAzureBlobStore.class);
+    private static final String BLOCK_BLOB_TYPE = "BlockBlob";
+    private static final String PAGE_BLOB_TYPE = "PageBlob";
+    private static final String APPEND_BLOB_TYPE = "AppendBlob";
+
+    private final LeaseExpiryPredicate leaseExpiryPredicate;
+    private final Map<String, AzureBlockBlob> blobs;
+
+    /**
+     * Provide the means of triggering lease expiration
+     *
+     * @param leaseExpiryPredicate A predicate that takes the active lease ID and the request's lease ID, and returns true when the
+     *                             active lease should be expired. Must not be null; pass {@link LeaseExpiryPredicate#NEVER_EXPIRE}
+     *                             to never expire leases.
+     */
+    public MockAzureBlobStore(LeaseExpiryPredicate leaseExpiryPredicate) {
+        this.blobs = new ConcurrentHashMap<>();
+        this.leaseExpiryPredicate = Objects.requireNonNull(leaseExpiryPredicate);
+    }
+
+    public void putBlock(String path, String blockId, BytesReference content, @Nullable String leaseId) {
+        blobs.compute(path, (p, existing) -> {
+            if (existing != null) {
+                existing.putBlock(blockId, content, leaseId);
+                return existing;
+            } else {
+                final AzureBlockBlob azureBlockBlob = new AzureBlockBlob();
+                azureBlockBlob.putBlock(blockId, content, leaseId);
+                return azureBlockBlob;
+            }
+        });
+    }
+
+    public void putBlockList(String path, List<String> blockIds, @Nullable String leaseId) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        blob.putBlockList(blockIds, leaseId);
+    }
+
+    public void putBlob(String path, BytesReference contents, String blobType, @Nullable String ifNoneMatch, @Nullable String leaseId) {
+        blobs.compute(path, (p, existingValue) -> {
+            if (existingValue != null) {
+                existingValue.setContents(contents, leaseId, ifNoneMatch);
+                return existingValue;
+            } else {
+                validateBlobType(blobType);
+                final AzureBlockBlob newBlob = new AzureBlockBlob();
+                newBlob.setContents(contents, leaseId);
+                return newBlob;
+            }
+        });
+    }
+
+    private void validateBlobType(String blobType) {
+        if (BLOCK_BLOB_TYPE.equals(blobType)) {
+            return;
+        }
+        final String errorMessage;
+        if (PAGE_BLOB_TYPE.equals(blobType) || APPEND_BLOB_TYPE.equals(blobType)) {
+            errorMessage = "Only BlockBlob is supported. This is a limitation of the MockAzureBlobStore";
+        } else {
+            errorMessage = "Invalid blobType: " + blobType;
+        }
+        // Fail the test and respond with an error
+        failTestWithAssertionError(errorMessage);
+        throw new MockAzureBlobStore.BadRequestException("InvalidHeaderValue", errorMessage);
+    }
+
+    public AzureBlockBlob getBlob(String path, @Nullable String leaseId) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        // This is the public implementation of "get blob" which will 404 for uncommitted block blobs
+        if (blob.isCommitted() == false) {
+            throw new BlobNotFoundException();
+        }
+        blob.checkLeaseForRead(leaseId);
+        return blob;
+    }
+
+    public void deleteBlob(String path, @Nullable String leaseId) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        blob.checkLeaseForWrite(leaseId);
+        blobs.remove(path);
+    }
+
+    public Map<String, AzureBlockBlob> listBlobs(String prefix, @Nullable String leaseId) {
+        return blobs.entrySet()
+            .stream()
+            .filter(e -> prefix == null || e.getKey().startsWith(prefix))
+            .filter(e -> e.getValue().isCommitted())
+            .peek(e -> e.getValue().checkLeaseForRead(leaseId))
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+    public String acquireLease(String path, int leaseTimeSeconds, @Nullable String proposedLeaseId) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        return blob.acquireLease(proposedLeaseId, leaseTimeSeconds);
+    }
+
+    public void releaseLease(String path, @Nullable String leaseId) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        blob.releaseLease(leaseId);
+    }
+
+    public void breakLease(String path, @Nullable Integer leaseBreakPeriod) {
+        final AzureBlockBlob blob = getExistingBlob(path);
+        blob.breakLease(leaseBreakPeriod);
+    }
+
+    public Map<String, BytesReference> blobs() {
+        return Maps.transformValues(blobs, AzureBlockBlob::getContents);
+    }
+
+    private AzureBlockBlob getExistingBlob(String path) {
+        final AzureBlockBlob blob = blobs.get(path);
+        if (blob == null) {
+            throw new BlobNotFoundException();
+        }
+        return blob;
+    }
+
+    static void failTestWithAssertionError(String message) {
+        ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError(message));
+    }
+
+    static void failTestWithAssertionError(String message, Throwable throwable) {
+        ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError(message, throwable));
+    }
+
+    public class AzureBlockBlob {
+        private final Object writeLock = new Object();
+        private final Lease lease = new Lease();
+        private final Map<String, BytesReference> blocks;
+        private volatile BytesReference contents;
+
+        private AzureBlockBlob() {
+            this.blocks = new ConcurrentHashMap<>();
+        }
+
+        public void putBlock(String blockId, BytesReference content, @Nullable String leaseId) {
+            synchronized (writeLock) {
+                lease.checkLeaseForWrite(leaseId);
+                this.blocks.put(blockId, content);
+            }
+        }
+
+        public void putBlockList(List<String> blockIds, @Nullable String leaseId) throws BadRequestException {
+            synchronized (writeLock) {
+                lease.checkLeaseForWrite(leaseId);
+                final List<String> unresolvedBlocks = blockIds.stream().filter(bId -> blocks.containsKey(bId) == false).toList();
+                if (unresolvedBlocks.isEmpty() == false) {
+                    logger.warn("Block list contained non-existent block IDs: {}", unresolvedBlocks);
+                    throw new BadRequestException("InvalidBlockList", "The specified blocklist is invalid.");
+                }
+                final
BytesReference[] resolvedContents = blockIds.stream().map(blocks::get).toList().toArray(new BytesReference[0]); + contents = CompositeBytesReference.of(resolvedContents); + } + } + + private boolean matches(String ifNoneMatchHeaderValue) { + if (ifNoneMatchHeaderValue == null) { + return false; + } + // We only support * + if ("*".equals(ifNoneMatchHeaderValue)) { + return true; + } + // Fail the test, trigger an internal server error + failTestWithAssertionError("We've only implemented 'If-None-Match: *' in the MockAzureBlobStore"); + throw new AzureBlobStoreError( + RestStatus.INTERNAL_SERVER_ERROR, + "UnsupportedHeader", + "The test fixture only supports * for If-None-Match" + ); + } + + public synchronized void setContents(BytesReference contents, @Nullable String leaseId) { + synchronized (writeLock) { + lease.checkLeaseForWrite(leaseId); + this.contents = contents; + this.blocks.clear(); + } + } + + public void setContents(BytesReference contents, @Nullable String leaseId, @Nullable String ifNoneMatchHeaderValue) { + synchronized (writeLock) { + if (matches(ifNoneMatchHeaderValue)) { + throw new PreconditionFailedException( + "TargetConditionNotMet", + "The target condition specified using HTTP conditional header(s) is not met." + ); + } + setContents(contents, leaseId); + } + } + + /** + * Get the committed contents of the blob + * + * @return The last committed contents of the blob, or null if the blob is uncommitted + */ + @Nullable + public BytesReference getContents() { + return contents; + } + + public String type() { + return BLOCK_BLOB_TYPE; + } + + public boolean isCommitted() { + return contents != null; + } + + @Override + public String toString() { + return "MockAzureBlockBlob{" + "blocks=" + blocks + ", contents=" + contents + '}'; + } + + public String acquireLease(@Nullable String proposedLeaseId, int leaseTimeSeconds) { + synchronized (writeLock) { + return lease.acquire(proposedLeaseId, leaseTimeSeconds); + } + } + + public void releaseLease(String leaseId) { + synchronized (writeLock) { + lease.release(leaseId); + } + } + + public void breakLease(@Nullable Integer leaseBreakPeriod) { + synchronized (writeLock) { + lease.breakLease(leaseBreakPeriod); + } + } + + public void checkLeaseForRead(@Nullable String leaseId) { + lease.checkLeaseForRead(leaseId); + } + + public void checkLeaseForWrite(@Nullable String leaseId) { + lease.checkLeaseForWrite(leaseId); + } + } + + /** + * @see acquire/release rules + * @see read/write rules + */ + public class Lease { + + /** + * Minimal set of states, we don't support breaking/broken + */ + enum State { + Available, + Leased, + Expired, + Broken + } + + private String leaseId; + private State state = State.Available; + private int leaseDurationSeconds; + + public synchronized String acquire(@Nullable String proposedLeaseId, int leaseDurationSeconds) { + maybeExpire(proposedLeaseId); + switch (state) { + case Available, Expired, Broken -> { + final State prevState = state; + state = State.Leased; + leaseId = proposedLeaseId != null ? 
proposedLeaseId : UUID.randomUUID().toString();
+                    validateLeaseDuration(leaseDurationSeconds);
+                    this.leaseDurationSeconds = leaseDurationSeconds;
+                    logger.debug("Granting lease, prior state={}, leaseId={}", prevState, leaseId);
+                }
+                case Leased -> {
+                    if (leaseId.equals(proposedLeaseId) == false) {
+                        logger.debug("Mismatch on acquire - proposed leaseId: {}, active leaseId: {}", proposedLeaseId, leaseId);
+                        throw new ConflictException(
+                            "LeaseIdMismatchWithLeaseOperation",
+                            "The lease ID specified did not match the lease ID for the blob/container."
+                        );
+                    }
+                    validateLeaseDuration(leaseDurationSeconds);
+                }
+            }
+            return leaseId;
+        }
+
+        public synchronized void release(String requestLeaseId) {
+            switch (state) {
+                case Available -> throw new ConflictException(
+                    "LeaseNotPresentWithLeaseOperation",
+                    "There is currently no lease on the blob/container."
+                );
+                case Leased, Expired, Broken -> {
+                    if (leaseId.equals(requestLeaseId) == false) {
+                        logger.debug("Mismatch on release - submitted leaseId: {}, active leaseId: {}", requestLeaseId, this.leaseId);
+                        throw new ConflictException(
+                            "LeaseIdMismatchWithLeaseOperation",
+                            "The lease ID specified did not match the lease ID for the blob/container."
+                        );
+                    }
+                    state = State.Available;
+                    this.leaseId = null;
+                }
+            }
+        }
+
+        public synchronized void breakLease(@Nullable Integer leaseBreakPeriod) {
+            // We haven't implemented the "Breaking" state so we don't support 'breaks' for non-infinite leases unless break-period is 0
+            if (leaseDurationSeconds != -1 && (leaseBreakPeriod == null || leaseBreakPeriod != 0)) {
+                failTestWithAssertionError(
+                    "MockAzureBlobStore only supports breaking non-infinite leases with 'x-ms-lease-break-period: 0'"
+                );
+            }
+            switch (state) {
+                case Available -> throw new ConflictException(
+                    "LeaseNotPresentWithLeaseOperation",
+                    "There is currently no lease on the blob/container."
+                );
+                case Leased, Expired, Broken -> state = State.Broken;
+            }
+        }
+
+        public synchronized void checkLeaseForWrite(@Nullable String requestLeaseId) {
+            maybeExpire(requestLeaseId);
+            switch (state) {
+                case Available, Expired, Broken -> {
+                    if (requestLeaseId != null) {
+                        throw new PreconditionFailedException(
+                            "LeaseLost",
+                            "A lease ID was specified, but the lease for the blob/container has expired."
+                        );
+                    }
+                }
+                case Leased -> {
+                    if (requestLeaseId == null) {
+                        throw new PreconditionFailedException(
+                            "LeaseIdMissing",
+                            "There is currently a lease on the blob/container and no lease ID was specified in the request."
+                        );
+                    }
+                    if (leaseId.equals(requestLeaseId) == false) {
+                        throw new ConflictException(
+                            "LeaseIdMismatchWithBlobOperation",
+                            "The lease ID specified did not match the lease ID for the blob."
+                        );
+                    }
+                }
+            }
+        }
+
+        public synchronized void checkLeaseForRead(@Nullable String requestLeaseId) {
+            maybeExpire(requestLeaseId);
+            switch (state) {
+                case Available, Expired, Broken -> {
+                    if (requestLeaseId != null) {
+                        throw new PreconditionFailedException(
+                            "LeaseLost",
+                            "A lease ID was specified, but the lease for the blob/container has expired."
+                        );
+                    }
+                }
+                case Leased -> {
+                    if (requestLeaseId != null && requestLeaseId.equals(leaseId) == false) {
+                        throw new ConflictException(
+                            "LeaseIdMismatchWithBlobOperation",
+                            "The lease ID specified did not match the lease ID for the blob."
+                        );
+                    }
+                }
+            }
+        }
+
+        /**
+         * If there's an active lease, ask the predicate whether we should expire the existing lease
+         *
+         * @param requestLeaseId The lease ID from the request, if any
+         */
+        private void maybeExpire(@Nullable String requestLeaseId) {
+            if (state == State.Leased && leaseExpiryPredicate.shouldExpireLease(leaseId, requestLeaseId)) {
+                logger.debug("Expiring lease, id={}", leaseId);
+                state = State.Expired;
+            }
+        }
+
+        private void validateLeaseDuration(long leaseTimeSeconds) {
+            if (leaseTimeSeconds != -1 && (leaseTimeSeconds < 15 || leaseTimeSeconds > 60)) {
+                throw new BadRequestException(
+                    "InvalidHeaderValue",
+                    AzureHttpHandler.X_MS_LEASE_DURATION + " must be -1 (infinite) or between 15 and 60 seconds (was " + leaseTimeSeconds + ")"
+                );
+            }
+        }
+    }
+
+    public static class AzureBlobStoreError extends RuntimeException {
+        private final RestStatus restStatus;
+        private final String errorCode;
+
+        public AzureBlobStoreError(RestStatus restStatus, String errorCode, String message) {
+            super(message);
+            this.restStatus = restStatus;
+            this.errorCode = errorCode;
+        }
+
+        public RestStatus getRestStatus() {
+            return restStatus;
+        }
+
+        public String getErrorCode() {
+            return errorCode;
+        }
+    }
+
+    public static class BlobNotFoundException extends AzureBlobStoreError {
+        public BlobNotFoundException() {
+            super(RestStatus.NOT_FOUND, "BlobNotFound", "The specified blob does not exist.");
+        }
+    }
+
+    public static class BadRequestException extends AzureBlobStoreError {
+        public BadRequestException(String errorCode, String message) {
+            super(RestStatus.BAD_REQUEST, errorCode, message);
+        }
+    }
+
+    public static class ConflictException extends AzureBlobStoreError {
+        public ConflictException(String errorCode, String message) {
+            super(RestStatus.CONFLICT, errorCode, message);
+        }
+    }
+
+    public static class PreconditionFailedException extends AzureBlobStoreError {
+        public PreconditionFailedException(String errorCode, String message) {
+            super(RestStatus.PRECONDITION_FAILED, errorCode, message);
+        }
+    }
+
+    public interface LeaseExpiryPredicate {
+
+        LeaseExpiryPredicate NEVER_EXPIRE = (activeLeaseId, requestLeaseId) -> false;
+
+        /**
+         * Should the lease be expired?
+ * + * @param activeLeaseId The current active lease ID + * @param requestLeaseId The request lease ID (if any) + * @return true to expire the lease, false otherwise + */ + boolean shouldExpireLease(String activeLeaseId, @Nullable String requestLeaseId); + } +} diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java index 7029a38edcb5a..d21dc4b2982f1 100644 --- a/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java +++ b/x-pack/plugin/repositories-metering-api/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/repositories/metering/azure/AzureRepositoriesMeteringIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.repositories.metering.azure; import fixture.azure.AzureHttpFixture; +import fixture.azure.MockAzureBlobStore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; @@ -37,7 +38,8 @@ public class AzureRepositoriesMeteringIT extends AbstractRepositoriesMeteringAPI AZURE_TEST_CONTAINER, System.getProperty("test.azure.tenant_id"), System.getProperty("test.azure.client_id"), - AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT), + MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java index 610b58453716c..f65db6dab1e68 100644 --- a/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/AzureSearchableSnapshotsIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.searchablesnapshots; import fixture.azure.AzureHttpFixture; +import fixture.azure.MockAzureBlobStore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Booleans; @@ -38,7 +39,8 @@ public class AzureSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestT AZURE_TEST_CONTAINER, System.getProperty("test.azure.tenant_id"), System.getProperty("test.azure.client_id"), - AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT) + AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT), + MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE ); private static TestTrustStore trustStore = new TestTrustStore( diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java index 591d4582d5905..8142b40166840 100644 --- 
a/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java
+++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/src/javaRestTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/AzureSnapshotBasedRecoveryIT.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.snapshotbasedrecoveries.recovery;
 
 import fixture.azure.AzureHttpFixture;
+import fixture.azure.MockAzureBlobStore;
 
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.Booleans;
@@ -37,7 +38,8 @@ public class AzureSnapshotBasedRecoveryIT extends AbstractSnapshotBasedRecoveryR
         AZURE_TEST_CONTAINER,
         System.getProperty("test.azure.tenant_id"),
         System.getProperty("test.azure.client_id"),
-        AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT)
+        AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT),
+        MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE
     );
 
     private static TestTrustStore trustStore = new TestTrustStore(
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AzureRepositoryAnalysisRestIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AzureRepositoryAnalysisRestIT.java
index a9b8fe51c01cc..03906b3cf69da 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AzureRepositoryAnalysisRestIT.java
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/AzureRepositoryAnalysisRestIT.java
@@ -25,6 +25,7 @@
 import java.io.IOException;
 import java.util.Map;
+import java.util.concurrent.ThreadLocalRandom;
 import java.util.function.Predicate;
 
 import static org.hamcrest.Matchers.blankOrNullString;
@@ -49,7 +50,10 @@ public class AzureRepositoryAnalysisRestIT extends AbstractRepositoryAnalysisRes
         AZURE_TEST_CONTAINER,
         AZURE_TEST_TENANT_ID,
         AZURE_TEST_CLIENT_ID,
-        decideAuthHeaderPredicate()
+        decideAuthHeaderPredicate(),
+        // 5% of the time, in a contended lease scenario, expire the existing lease
+        (currentLeaseId, requestLeaseId) -> currentLeaseId.equals(requestLeaseId) == false
+            && ThreadLocalRandom.current().nextDouble() < 0.05
     );
 
     private static Predicate<String> decideAuthHeaderPredicate() {
@@ -78,12 +82,6 @@ private static Predicate<String> decideAuthHeaderPredicate() {
             () -> "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + fixture.getAddress(),
             s -> USE_FIXTURE
         )
-        .apply(c -> {
-            if (USE_FIXTURE) {
-                // test fixture does not support CAS yet; TODO fix this
-                c.systemProperty("test.repository_test_kit.skip_cas", "true");
-            }
-        })
         .systemProperty(
             "tests.azure.credentials.disable_instance_discovery",
             () -> "true",

From 24bc505e28cadad4a3253a458ce6493a916b22e8 Mon Sep 17 00:00:00 2001
From: Yang Wang
Date: Fri, 29 Nov 2024 14:07:48 +1100
Subject: [PATCH 313/386] [Test] Increase test secret key length (#117675)

Running with FIPS approved mode requires secret keys to be at least 112
bits long.
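For background, the fixtures sign requests with HMAC-SHA256 using the
configured secret key, and a FIPS-approved provider rejects HMAC keys
shorter than 112 bits. A rough standalone illustration of the arithmetic
(the class name is illustrative and the provider behaviour is an
assumption about approved-only mode, not part of this change):

    import javax.crypto.Mac;
    import javax.crypto.spec.SecretKeySpec;
    import java.nio.charset.StandardCharsets;

    public class MinimumHmacKeyLengthDemo {
        public static void main(String[] args) throws Exception {
            // 14 ASCII characters x 8 bits/char = 112 bits, the FIPS minimum;
            // a 13-character key (104 bits) would be rejected in approved-only mode.
            byte[] key = "14-char-secret".getBytes(StandardCharsets.UTF_8);
            Mac mac = Mac.getInstance("HmacSHA256");
            mac.init(new SecretKeySpec(key, "HmacSHA256"));
            mac.doFinal("string-to-sign".getBytes(StandardCharsets.UTF_8));
        }
    }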
Relates: #117324 Resolves: #117596 Resolves: #117709 Resolves: #117710 Resolves: #117711 Resolves: #117712 --- .../RepositoryS3RestReloadCredentialsIT.java | 19 +++++++++++++------ muted-tests.yml | 2 -- .../fixture/aws/sts/AwsStsHttpHandler.java | 3 ++- .../fixture/aws/imds/Ec2ImdsHttpHandler.java | 3 ++- .../org/elasticsearch/test/ESTestCase.java | 7 +++++++ 5 files changed, 24 insertions(+), 10 deletions(-) diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java index 430c0a1994967..1f09fa6b081b9 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestReloadCredentialsIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.s3.S3HttpFixture; +import io.netty.handler.codec.http.HttpMethod; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; @@ -61,8 +62,6 @@ protected String getTestRestCluster() { } public void testReloadCredentialsFromKeystore() throws IOException { - assumeFalse("doesn't work in a FIPS JVM, but that's ok", inFipsJvm()); - // Register repository (?verify=false because we don't have access to the blob store yet) final var repositoryName = randomIdentifier(); registerRepository( @@ -77,15 +76,16 @@ public void testReloadCredentialsFromKeystore() throws IOException { final var accessKey1 = randomIdentifier(); repositoryAccessKey = accessKey1; keystoreSettings.put("s3.client.default.access_key", accessKey1); - keystoreSettings.put("s3.client.default.secret_key", randomIdentifier()); + keystoreSettings.put("s3.client.default.secret_key", randomSecretKey()); cluster.updateStoredSecureSettings(); - assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + + assertOK(client().performRequest(createReloadSecureSettingsRequest())); // Check access using initial credentials assertOK(client().performRequest(verifyRequest)); // Rotate credentials in blob store - final var accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomIdentifier); + final var accessKey2 = randomValueOtherThan(accessKey1, ESTestCase::randomSecretKey); repositoryAccessKey = accessKey2; // Ensure that initial credentials now invalid @@ -99,10 +99,17 @@ public void testReloadCredentialsFromKeystore() throws IOException { // Set up refreshed credentials keystoreSettings.put("s3.client.default.access_key", accessKey2); cluster.updateStoredSecureSettings(); - assertOK(client().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + assertOK(client().performRequest(createReloadSecureSettingsRequest())); // Check access using refreshed credentials assertOK(client().performRequest(verifyRequest)); } + private Request createReloadSecureSettingsRequest() throws IOException { + return newXContentRequest( + HttpMethod.POST, + "/_nodes/reload_secure_settings", + (b, p) -> inFipsJvm() ? 
b.field("secure_settings_password", "keystore-password") : b + ); + } } diff --git a/muted-tests.yml b/muted-tests.yml index d703cfaa1b9aa..c3f67f97011ee 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -216,8 +216,6 @@ tests: - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests method: testStopWorksInMiddleOfProcessing issue: https://github.com/elastic/elasticsearch/issues/117591 -- class: org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT - issue: https://github.com/elastic/elasticsearch/issues/117596 - class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 diff --git a/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java index 84541f5e15211..ac3299f157485 100644 --- a/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java +++ b/test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java @@ -28,6 +28,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.test.ESTestCase.randomIdentifier; +import static org.elasticsearch.test.ESTestCase.randomSecretKey; /** * Minimal HTTP handler that emulates the AWS STS server @@ -102,7 +103,7 @@ public void handle(final HttpExchange exchange) throws IOException { ROLE_ARN, ROLE_NAME, sessionToken, - randomIdentifier(), + randomSecretKey(), ZonedDateTime.now().plusDays(1L).format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssZ")), accessKey ).getBytes(StandardCharsets.UTF_8); diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java index a92f1bdc5f9ae..bc87eff592bec 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -28,6 +28,7 @@ import java.util.function.BiConsumer; import static org.elasticsearch.test.ESTestCase.randomIdentifier; +import static org.elasticsearch.test.ESTestCase.randomSecretKey; /** * Minimal HTTP handler that emulates the EC2 IMDS server @@ -84,7 +85,7 @@ public void handle(final HttpExchange exchange) throws IOException { accessKey, ZonedDateTime.now(Clock.systemUTC()).plusDays(1L).format(DateTimeFormatter.ISO_DATE_TIME), randomIdentifier(), - randomIdentifier(), + randomSecretKey(), sessionToken ).getBytes(StandardCharsets.UTF_8); exchange.getResponseHeaders().add("Content-Type", "application/json"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 5b2beaee00bfe..d983fc854bdfd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1358,6 +1358,13 @@ public static String randomDateFormatterPattern() { return randomFrom(FormatNames.values()).getName(); } + /** + * Generate a random string of at least 112 bits to satisfy minimum entropy requirement when running in FIPS mode. 
+ */ + public static String randomSecretKey() { + return randomAlphaOfLengthBetween(14, 20); + } + /** * Randomly choose between {@link EsExecutors#DIRECT_EXECUTOR_SERVICE} (which does not fork), {@link ThreadPool#generic}, and one of the * other named threadpool executors. From 5935f766df80325f748c3193e13e6e74fb5c1f37 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 29 Nov 2024 17:44:27 +1100 Subject: [PATCH 314/386] Mute org.elasticsearch.xpack.inference.InferenceCrudIT testSupportedStream #117745 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index c3f67f97011ee..40d3dcf46e1b9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -222,6 +222,9 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.single_node.EsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 +- class: org.elasticsearch.xpack.inference.InferenceCrudIT + method: testSupportedStream + issue: https://github.com/elastic/elasticsearch/issues/117745 # Examples: # From 17d280363c62dc4d35c320246d36ec8cd14e4533 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 29 Nov 2024 09:54:38 +0000 Subject: [PATCH 315/386] Add YAML test for status in indices stats (#116711) The feature added in #81954 lacks coverage in BwC situations. This commit adds a YAML test to address that. --- .../indices.stats/15_open_closed_state.yml | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/15_open_closed_state.yml diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/15_open_closed_state.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/15_open_closed_state.yml new file mode 100644 index 0000000000000..94b6a3acc83a8 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/15_open_closed_state.yml @@ -0,0 +1,22 @@ +--- +"Ensure index state is exposed": + - requires: + cluster_features: ["gte_v8.1.0"] + reason: index state added to stats in 8.1.0 + + - do: + indices.create: + index: openindex + - do: + indices.create: + index: closedindex + - do: + indices.close: + index: closedindex + - do: + indices.stats: + expand_wildcards: [open,closed] + forbid_closed_indices: false + + - match: { indices.openindex.status: open } + - match: { indices.closedindex.status: close } From c3f9e0172333b8edae525865c9d84b29a1c6ab8f Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 29 Nov 2024 09:58:09 +0000 Subject: [PATCH 316/386] Migrate `repository-s3` YAML tests to Java REST tests (#117628) Today these YAML tests rely on a bunch of rather complex setup organised by Gradle, and contain lots of duplication and coincident strings, mostly because that was the only way to achieve what we wanted before we could orchestrate test clusters and fixtures directly from Java test suites. We're not actually running the YAML tests in ways that take advantage of their YAMLness (e.g. in mixed-version clusters, or from other client libraries). This commit replaces these tests with Java REST tests which enormously simplifies this area of code. 
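Each credentials variant is now a small self-contained suite that wires
the S3 fixture and the test cluster together directly. A trimmed sketch
of the common shape (the class name and constants here are placeholders,
and the abstract accessor overrides are elided; the real suites below
spell out the full wiring):

    @ThreadLeakFilters(filters = { TestContainersThreadFilter.class })
    public class RepositoryS3ExampleCredentialsRestIT extends AbstractRepositoryS3RestTestCase {

        private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, S3HttpFixture.fixedAccessKey(ACCESS_KEY));

        public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
            .module("repository-s3")
            .keystore("s3.client." + CLIENT + ".access_key", ACCESS_KEY)
            .keystore("s3.client." + CLIENT + ".secret_key", SECRET_KEY)
            .setting("s3.client." + CLIENT + ".endpoint", s3Fixture::getAddress)
            .build();

        // Start the fixture before the cluster so the endpoint is reachable
        @ClassRule
        public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster);
    }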
Relates ES-9984 --- modules/repository-s3/build.gradle | 118 +----- .../s3/S3RepositoryThirdPartyTests.java | 7 +- .../s3/AbstractRepositoryS3RestTestCase.java | 383 ++++++++++++++++++ .../RepositoryS3BasicCredentialsRestIT.java | 65 +++ .../s3/RepositoryS3EcsCredentialsRestIT.java} | 44 +- .../RepositoryS3ImdsV1CredentialsRestIT.java | 73 ++++ ...ositoryS3MinioBasicCredentialsRestIT.java} | 44 +- .../RepositoryS3SessionCredentialsRestIT.java | 72 ++++ .../s3/RepositoryS3StsCredentialsRestIT.java} | 64 +-- .../repositories/s3/S3BlobStore.java | 2 +- .../repositories/s3/S3Service.java | 8 +- .../resources/aws-web-identity-token-file | 1 - .../s3/RepositoryS3ClientYamlTestSuiteIT.java | 57 +-- ...oryS3RegionalStsClientYamlTestSuiteIT.java | 12 +- .../20_repository_permanent_credentials.yml | 265 +----------- .../30_repository_temporary_credentials.yml | 278 ------------- .../40_repository_ec2_credentials.yml | 278 ------------- .../50_repository_ecs_credentials.yml | 278 ------------- .../60_repository_sts_credentials.yml | 279 ------------- .../fixtures/minio/MinioTestContainer.java | 12 +- .../main/java/fixture/s3/S3HttpFixture.java | 4 - .../local/AbstractLocalClusterFactory.java | 2 + .../minio/MinioSearchableSnapshotsIT.java | 7 +- .../MinioRepositoryAnalysisRestIT.java | 7 +- 24 files changed, 765 insertions(+), 1595 deletions(-) create mode 100644 modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/AbstractRepositoryS3RestTestCase.java create mode 100644 modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java rename modules/repository-s3/src/{yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java => javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java} (59%) create mode 100644 modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java rename modules/repository-s3/src/{yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioClientYamlTestSuiteIT.java => javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java} (50%) create mode 100644 modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java rename modules/repository-s3/src/{yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java => javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java} (53%) delete mode 100644 modules/repository-s3/src/test/resources/aws-web-identity-token-file delete mode 100644 modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml delete mode 100644 modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml delete mode 100644 modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml delete mode 100644 modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/60_repository_sts_credentials.yml diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index ed1777891f40d..2cfb5d23db4ff 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -43,19 +43,24 @@ dependencies { api 'javax.xml.bind:jaxb-api:2.2.2' testImplementation 
project(':test:fixtures:s3-fixture') - yamlRestTestImplementation project(":test:framework") - yamlRestTestImplementation project(':test:fixtures:s3-fixture') - yamlRestTestImplementation project(':test:fixtures:ec2-imds-fixture') - yamlRestTestImplementation project(':test:fixtures:aws-sts-fixture') - yamlRestTestImplementation project(':test:fixtures:minio-fixture') - internalClusterTestImplementation project(':test:fixtures:minio-fixture') - javaRestTestImplementation project(":test:framework") - javaRestTestImplementation project(':test:fixtures:s3-fixture') - javaRestTestImplementation project(':modules:repository-s3') + internalClusterTestImplementation project(':test:fixtures:minio-fixture') + internalClusterTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + yamlRestTestImplementation project(':modules:repository-s3') + yamlRestTestImplementation project(':test:fixtures:s3-fixture') + yamlRestTestImplementation project(':test:fixtures:testcontainer-utils') + yamlRestTestImplementation project(':test:framework') yamlRestTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" - internalClusterTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + + javaRestTestImplementation project(':modules:repository-s3') + javaRestTestImplementation project(':test:fixtures:aws-sts-fixture') + javaRestTestImplementation project(':test:fixtures:ec2-imds-fixture') + javaRestTestImplementation project(':test:fixtures:minio-fixture') + javaRestTestImplementation project(':test:fixtures:s3-fixture') + javaRestTestImplementation project(':test:fixtures:testcontainer-utils') + javaRestTestImplementation project(':test:framework') + javaRestTestRuntimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" } restResources { @@ -82,90 +87,25 @@ def testRepositoryCreds = tasks.register("testRepositoryCreds", Test) { testClassesDirs = sourceSets.test.output.classesDirs } -tasks.named('check').configure { - dependsOn(testRepositoryCreds) -} - tasks.named('test').configure { // this is tested explicitly in separate test tasks exclude '**/RepositoryCredentialsTests.class' } boolean useFixture = false - -// We test against two repositories, one which uses the usual two-part "permanent" credentials and -// the other which uses three-part "temporary" or "session" credentials. 
- String s3PermanentAccessKey = System.getenv("amazon_s3_access_key") String s3PermanentSecretKey = System.getenv("amazon_s3_secret_key") String s3PermanentBucket = System.getenv("amazon_s3_bucket") String s3PermanentBasePath = System.getenv("amazon_s3_base_path") -String s3TemporaryAccessKey = System.getenv("amazon_s3_access_key_temporary") -String s3TemporarySecretKey = System.getenv("amazon_s3_secret_key_temporary") -String s3TemporarySessionToken = System.getenv("amazon_s3_session_token_temporary") -String s3TemporaryBucket = System.getenv("amazon_s3_bucket_temporary") -String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") - -String s3EC2Bucket = System.getenv("amazon_s3_bucket_ec2") -String s3EC2BasePath = System.getenv("amazon_s3_base_path_ec2") - -String s3ECSBucket = System.getenv("amazon_s3_bucket_ecs") -String s3ECSBasePath = System.getenv("amazon_s3_base_path_ecs") - -String s3STSBucket = System.getenv("amazon_s3_bucket_sts") -String s3STSBasePath = System.getenv("amazon_s3_base_path_sts") - -boolean s3DisableChunkedEncoding = buildParams.random.nextBoolean() - -// If all these variables are missing then we are testing against the internal fixture instead, which has the following -// credentials hard-coded in. +// If all these variables are missing then we are testing against the internal fixture instead, which has the following credentials hard-coded in. if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) { + useFixture = true s3PermanentAccessKey = 's3_test_access_key' s3PermanentSecretKey = 's3_test_secret_key' s3PermanentBucket = 'bucket' s3PermanentBasePath = 'base_path' - useFixture = true -} -if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) { - s3TemporaryAccessKey = 'session_token_access_key' - s3TemporarySecretKey = 'session_token_secret_key' - s3TemporaryBucket = 'session_token_bucket' - s3TemporaryBasePath = 'session_token_base_path' -} - -if (!s3EC2Bucket && !s3EC2BasePath && !s3ECSBucket && !s3ECSBasePath) { - s3EC2Bucket = 'ec2_bucket' - s3EC2BasePath = 'ec2_base_path' - s3ECSBucket = 'ecs_bucket' - s3ECSBasePath = 'ecs_base_path' -} - -if (!s3STSBucket && !s3STSBasePath) { - s3STSBucket = 'sts_bucket' - s3STSBasePath = 'sts_base_path' -} - -tasks.named("processYamlRestTestResources").configure { - from("src/test/resources") { - include "aws-web-identity-token-file" - } - Map expansions = [ - 'permanent_bucket' : s3PermanentBucket, - 'permanent_base_path' : s3PermanentBasePath + "_integration_tests", - 'temporary_bucket' : s3TemporaryBucket, - 'temporary_base_path' : s3TemporaryBasePath + "_integration_tests", - 'ec2_bucket' : s3EC2Bucket, - 'ec2_base_path' : s3EC2BasePath, - 'ecs_bucket' : s3ECSBucket, - 'ecs_base_path' : s3ECSBasePath, - 'sts_bucket' : s3STSBucket, - 'sts_base_path' : s3STSBasePath, - 'disable_chunked_encoding': s3DisableChunkedEncoding - ] - inputs.properties(expansions) - filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class) } tasks.named("internalClusterTest").configure { @@ -175,22 +115,7 @@ tasks.named("internalClusterTest").configure { systemProperty 'es.insecure_network_trace_enabled', 'true' } -tasks.named("yamlRestTest").configure { - systemProperty("s3PermanentAccessKey", s3PermanentAccessKey) - systemProperty("s3PermanentSecretKey", s3PermanentSecretKey) - systemProperty("s3TemporaryAccessKey", s3TemporaryAccessKey) - 
systemProperty("s3TemporarySecretKey", s3TemporarySecretKey) - systemProperty("s3EC2AccessKey", s3PermanentAccessKey) - - // ideally we could resolve an env path in cluster config as resource similar to configuring a config file - // not sure how common this is, but it would be nice to support - File awsWebIdentityTokenExternalLocation = file('src/test/resources/aws-web-identity-token-file') - // The web identity token can be read only from the plugin config directory because of security restrictions - // Ideally we would create a symlink, but extraConfigFile doesn't support it - nonInputProperties.systemProperty("awsWebIdentityTokenExternalLocation", awsWebIdentityTokenExternalLocation.getAbsolutePath()) -} - -// 3rd Party Tests +// 3rd Party Tests, i.e. testing against a real S3 repository tasks.register("s3ThirdPartyTest", Test) { SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet internalTestSourceSet = sourceSets.getByName(InternalClusterTestPlugin.SOURCE_SET_NAME) @@ -198,13 +123,13 @@ tasks.register("s3ThirdPartyTest", Test) { setClasspath(internalTestSourceSet.getRuntimeClasspath()) include '**/S3RepositoryThirdPartyTests.class' systemProperty("tests.use.fixture", Boolean.toString(useFixture)) - - // test container accesses ~/.testcontainers.properties read - systemProperty "tests.security.manager", "false" systemProperty 'test.s3.account', s3PermanentAccessKey systemProperty 'test.s3.key', s3PermanentSecretKey systemProperty 'test.s3.bucket', s3PermanentBucket nonInputProperties.systemProperty 'test.s3.base', s3PermanentBasePath + "_third_party_tests_" + buildParams.testSeed + + // test container accesses ~/.testcontainers.properties read + systemProperty "tests.security.manager", "false" } tasks.named("thirdPartyAudit").configure { @@ -241,5 +166,6 @@ tasks.named("thirdPartyAudit").configure { tasks.named("check").configure { dependsOn(tasks.withType(Test)) + dependsOn(testRepositoryCreds) } diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index 3552cb8d9389a..4cebedebfba07 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -61,7 +61,12 @@ public class S3RepositoryThirdPartyTests extends AbstractThirdPartyRepositoryTes static final boolean USE_FIXTURE = Booleans.parseBoolean(System.getProperty("tests.use.fixture", "true")); @ClassRule - public static MinioTestContainer minio = new MinioTestContainer(USE_FIXTURE); + public static MinioTestContainer minio = new MinioTestContainer( + USE_FIXTURE, + System.getProperty("test.s3.account"), + System.getProperty("test.s3.key"), + System.getProperty("test.s3.bucket") + ); @Override protected Collection> getPlugins() { diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/AbstractRepositoryS3RestTestCase.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/AbstractRepositoryS3RestTestCase.java new file mode 100644 index 0000000000000..2199a64521759 --- /dev/null +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/AbstractRepositoryS3RestTestCase.java @@ -0,0 +1,383 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import io.netty.handler.codec.http.HttpMethod; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.routing.Murmur3HashFunction; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Set; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public abstract class AbstractRepositoryS3RestTestCase extends ESRestTestCase { + + public record TestRepository(String repositoryName, String clientName, String bucketName, String basePath) { + + public Closeable register() throws IOException { + return register(UnaryOperator.identity()); + } + + public Closeable register(UnaryOperator settingsUnaryOperator) throws IOException { + assertOK(client().performRequest(getRegisterRequest(settingsUnaryOperator))); + return () -> assertOK(client().performRequest(new Request("DELETE", "/_snapshot/" + repositoryName()))); + } + + private Request getRegisterRequest(UnaryOperator settingsUnaryOperator) throws IOException { + return newXContentRequest( + HttpMethod.PUT, + "/_snapshot/" + repositoryName(), + (b, p) -> b.field("type", S3Repository.TYPE) + .startObject("settings") + .value( + settingsUnaryOperator.apply( + Settings.builder() + .put("bucket", bucketName()) + .put("base_path", basePath()) + .put("client", clientName()) + .put("canned_acl", "private") + .put("storage_class", "standard") + .put("disable_chunked_encoding", randomBoolean()) + .build() + ) + ) + .endObject() + ); + } + } + + protected abstract String getBucketName(); + + protected abstract String getBasePath(); + + protected abstract String getClientName(); + + protected static String getIdentifierPrefix(String testSuiteName) { + return testSuiteName + "-" + Integer.toString(Murmur3HashFunction.hash(testSuiteName + System.getProperty("tests.seed")), 16) + "-"; + } + + private TestRepository newTestRepository() { + return new TestRepository(randomIdentifier(), getClientName(), getBucketName(), getBasePath()); + } + + private static UnaryOperator readonlyOperator(Boolean readonly) { + return readonly == null + ? 
UnaryOperator.identity() + : s -> Settings.builder().put(s).put(BlobStoreRepository.READONLY_SETTING_KEY, readonly).build(); + } + + public void testGetRepository() throws IOException { + testGetRepository(null); + } + + public void testGetRepositoryReadonlyTrue() throws IOException { + testGetRepository(Boolean.TRUE); + } + + public void testGetRepositoryReadonlyFalse() throws IOException { + testGetRepository(Boolean.FALSE); + } + + private void testGetRepository(Boolean readonly) throws IOException { + final var repository = newTestRepository(); + try (var ignored = repository.register(readonlyOperator(readonly))) { + final var repositoryName = repository.repositoryName(); + final var responseObjectPath = assertOKAndCreateObjectPath( + client().performRequest(new Request("GET", "/_snapshot/" + repositoryName)) + ); + + assertEquals("s3", responseObjectPath.evaluate(repositoryName + ".type")); + assertNotNull(responseObjectPath.evaluate(repositoryName + ".settings")); + assertEquals(repository.bucketName(), responseObjectPath.evaluate(repositoryName + ".settings.bucket")); + assertEquals(repository.clientName(), responseObjectPath.evaluate(repositoryName + ".settings.client")); + assertEquals(repository.basePath(), responseObjectPath.evaluate(repositoryName + ".settings.base_path")); + assertEquals("private", responseObjectPath.evaluate(repositoryName + ".settings.canned_acl")); + assertEquals("standard", responseObjectPath.evaluate(repositoryName + ".settings.storage_class")); + assertNull(responseObjectPath.evaluate(repositoryName + ".settings.access_key")); + assertNull(responseObjectPath.evaluate(repositoryName + ".settings.secret_key")); + assertNull(responseObjectPath.evaluate(repositoryName + ".settings.session_token")); + + if (readonly == null) { + assertNull(responseObjectPath.evaluate(repositoryName + ".settings." + BlobStoreRepository.READONLY_SETTING_KEY)); + } else { + assertEquals( + Boolean.toString(readonly), + responseObjectPath.evaluate(repositoryName + ".settings." 
+ BlobStoreRepository.READONLY_SETTING_KEY) + ); + } + } + } + + public void testNonexistentBucket() throws Exception { + testNonexistentBucket(null); + } + + public void testNonexistentBucketReadonlyTrue() throws Exception { + testNonexistentBucket(Boolean.TRUE); + } + + public void testNonexistentBucketReadonlyFalse() throws Exception { + testNonexistentBucket(Boolean.FALSE); + } + + private void testNonexistentBucket(Boolean readonly) throws Exception { + final var repository = new TestRepository( + randomIdentifier(), + getClientName(), + randomValueOtherThan(getBucketName(), ESTestCase::randomIdentifier), + getBasePath() + ); + final var registerRequest = repository.getRegisterRequest(readonlyOperator(readonly)); + + final var responseException = expectThrows(ResponseException.class, () -> client().performRequest(registerRequest)); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), responseException.getResponse().getStatusLine().getStatusCode()); + assertThat( + responseException.getMessage(), + allOf(containsString("repository_verification_exception"), containsString("is not accessible on master node")) + ); + } + + public void testNonexistentClient() throws Exception { + testNonexistentClient(null); + } + + public void testNonexistentClientReadonlyTrue() throws Exception { + testNonexistentClient(Boolean.TRUE); + } + + public void testNonexistentClientReadonlyFalse() throws Exception { + testNonexistentClient(Boolean.FALSE); + } + + private void testNonexistentClient(Boolean readonly) throws Exception { + final var repository = new TestRepository( + randomIdentifier(), + randomValueOtherThanMany(c -> c.equals(getClientName()) || c.equals("default"), ESTestCase::randomIdentifier), + getBucketName(), + getBasePath() + ); + final var registerRequest = repository.getRegisterRequest(readonlyOperator(readonly)); + + final var responseException = expectThrows(ResponseException.class, () -> client().performRequest(registerRequest)); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), responseException.getResponse().getStatusLine().getStatusCode()); + assertThat( + responseException.getMessage(), + allOf( + containsString("repository_verification_exception"), + containsString("is not accessible on master node"), + containsString("illegal_argument_exception"), + containsString("Unknown s3 client name") + ) + ); + } + + public void testNonexistentSnapshot() throws Exception { + testNonexistentSnapshot(null); + } + + public void testNonexistentSnapshotReadonlyTrue() throws Exception { + testNonexistentSnapshot(Boolean.TRUE); + } + + public void testNonexistentSnapshotReadonlyFalse() throws Exception { + testNonexistentSnapshot(Boolean.FALSE); + } + + private void testNonexistentSnapshot(Boolean readonly) throws Exception { + final var repository = newTestRepository(); + try (var ignored = repository.register(readonlyOperator(readonly))) { + final var repositoryName = repository.repositoryName(); + + final var getSnapshotRequest = new Request("GET", "/_snapshot/" + repositoryName + "/" + randomIdentifier()); + final var getSnapshotException = expectThrows(ResponseException.class, () -> client().performRequest(getSnapshotRequest)); + assertEquals(RestStatus.NOT_FOUND.getStatus(), getSnapshotException.getResponse().getStatusLine().getStatusCode()); + assertThat(getSnapshotException.getMessage(), containsString("snapshot_missing_exception")); + + final var restoreRequest = new Request("POST", "/_snapshot/" + repositoryName + "/" + randomIdentifier() + "/_restore"); + if 
(randomBoolean()) { + restoreRequest.addParameter("wait_for_completion", Boolean.toString(randomBoolean())); + } + final var restoreException = expectThrows(ResponseException.class, () -> client().performRequest(restoreRequest)); + assertEquals(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), restoreException.getResponse().getStatusLine().getStatusCode()); + assertThat(restoreException.getMessage(), containsString("snapshot_restore_exception")); + + if (readonly != Boolean.TRUE) { + final var deleteRequest = new Request("DELETE", "/_snapshot/" + repositoryName + "/" + randomIdentifier()); + final var deleteException = expectThrows(ResponseException.class, () -> client().performRequest(deleteRequest)); + assertEquals(RestStatus.NOT_FOUND.getStatus(), deleteException.getResponse().getStatusLine().getStatusCode()); + assertThat(deleteException.getMessage(), containsString("snapshot_missing_exception")); + } + } + } + + public void testUsageStats() throws Exception { + testUsageStats(null); + } + + public void testUsageStatsReadonlyTrue() throws Exception { + testUsageStats(Boolean.TRUE); + } + + public void testUsageStatsReadonlyFalse() throws Exception { + testUsageStats(Boolean.FALSE); + } + + private void testUsageStats(Boolean readonly) throws Exception { + final var repository = newTestRepository(); + try (var ignored = repository.register(readonlyOperator(readonly))) { + final var responseObjectPath = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_cluster/stats"))); + assertThat(responseObjectPath.evaluate("repositories.s3.count"), equalTo(1)); + + if (readonly == Boolean.TRUE) { + assertThat(responseObjectPath.evaluate("repositories.s3.read_only"), equalTo(1)); + assertNull(responseObjectPath.evaluate("repositories.s3.read_write")); + } else { + assertNull(responseObjectPath.evaluate("repositories.s3.read_only")); + assertThat(responseObjectPath.evaluate("repositories.s3.read_write"), equalTo(1)); + } + } + } + + public void testSnapshotAndRestore() throws Exception { + final var repository = newTestRepository(); + try (var ignored = repository.register()) { + final var repositoryName = repository.repositoryName(); + final var indexName = randomIdentifier(); + final var snapshotsToDelete = new ArrayList(2); + + try { + indexDocuments(indexName, """ + {"index":{"_id":"1"}} + {"snapshot":"one"} + {"index":{"_id":"2"}} + {"snapshot":"one"} + {"index":{"_id":"3"}} + {"snapshot":"one"} + """, 3); + + // create the first snapshot + final var snapshot1Name = randomIdentifier(); + createSnapshot(repositoryName, snapshotsToDelete, snapshot1Name); + + // check the first snapshot's status + { + final var snapshotStatusResponse = assertOKAndCreateObjectPath( + client().performRequest(new Request("GET", "/_snapshot/" + repositoryName + "/" + snapshot1Name + "/_status")) + ); + assertEquals(snapshot1Name, snapshotStatusResponse.evaluate("snapshots.0.snapshot")); + assertEquals("SUCCESS", snapshotStatusResponse.evaluate("snapshots.0.state")); + } + + // add more documents to the index + indexDocuments(indexName, """ + {"index":{"_id":"4"}} + {"snapshot":"one"} + {"index":{"_id":"5"}} + {"snapshot":"one"} + {"index":{"_id":"6"}} + {"snapshot":"one"} + {"index":{"_id":"7"}} + {"snapshot":"one"} + """, 7); + + // create the second snapshot + final var snapshot2Name = randomValueOtherThan(snapshot1Name, ESTestCase::randomIdentifier); + createSnapshot(repositoryName, snapshotsToDelete, snapshot2Name); + + // list the snapshots + { + final var listSnapshotsResponse = 
+
+                // list the snapshots
+                {
+                    final var listSnapshotsResponse = assertOKAndCreateObjectPath(
+                        client().performRequest(
+                            new Request("GET", "/_snapshot/" + repositoryName + "/" + snapshot1Name + "," + snapshot2Name)
+                        )
+                    );
+                    assertEquals(2, listSnapshotsResponse.evaluateArraySize("snapshots"));
+                    assertEquals(
+                        Set.of(snapshot1Name, snapshot2Name),
+                        Set.of(
+                            listSnapshotsResponse.evaluate("snapshots.0.snapshot"),
+                            listSnapshotsResponse.evaluate("snapshots.1.snapshot")
+                        )
+                    );
+                    assertEquals("SUCCESS", listSnapshotsResponse.evaluate("snapshots.0.state"));
+                    assertEquals("SUCCESS", listSnapshotsResponse.evaluate("snapshots.1.state"));
+                }
+
+                // delete and restore the index from snapshot 2
+                deleteAndRestoreIndex(indexName, repositoryName, snapshot2Name, 7);
+
+                // delete and restore the index from snapshot 1
+                deleteAndRestoreIndex(indexName, repositoryName, snapshot1Name, 3);
+            } finally {
+                if (snapshotsToDelete.isEmpty() == false) {
+                    assertOK(
+                        client().performRequest(
+                            new Request(
+                                "DELETE",
+                                "/_snapshot/" + repositoryName + "/" + snapshotsToDelete.stream().collect(Collectors.joining(","))
+                            )
+                        )
+                    );
+                }
+            }
+        }
+    }
+
+    private static void deleteAndRestoreIndex(String indexName, String repositoryName, String snapshotName, int expectedDocCount)
+        throws IOException {
+        assertOK(client().performRequest(new Request("DELETE", "/" + indexName)));
+        final var restoreRequest = new Request("POST", "/_snapshot/" + repositoryName + "/" + snapshotName + "/_restore");
+        restoreRequest.addParameter("wait_for_completion", "true");
+        assertOK(client().performRequest(restoreRequest));
+        assertIndexDocCount(indexName, expectedDocCount);
+    }
+
+    private static void indexDocuments(String indexName, String body, int expectedDocCount) throws IOException {
+        // create and populate an index
+        final var indexDocsRequest = new Request("POST", "/" + indexName + "/_bulk");
+        indexDocsRequest.addParameter("refresh", "true");
+        indexDocsRequest.setJsonEntity(body);
+        assertFalse(assertOKAndCreateObjectPath(client().performRequest(indexDocsRequest)).evaluate("errors"));
+
+        // check the index contents
+        assertIndexDocCount(indexName, expectedDocCount);
+    }
+
+    private static void createSnapshot(String repositoryName, ArrayList<String> snapshotsToDelete, String snapshotName) throws IOException {
+        final var createSnapshotRequest = new Request("POST", "/_snapshot/" + repositoryName + "/" + snapshotName);
+        createSnapshotRequest.addParameter("wait_for_completion", "true");
+        final var createSnapshotResponse = assertOKAndCreateObjectPath(client().performRequest(createSnapshotRequest));
+        snapshotsToDelete.add(snapshotName);
+        assertEquals(snapshotName, createSnapshotResponse.evaluate("snapshot.snapshot"));
+        assertEquals("SUCCESS", createSnapshotResponse.evaluate("snapshot.state"));
+        assertThat(createSnapshotResponse.evaluate("snapshot.shards.failed"), equalTo(0));
+    }
+
+    private static void assertIndexDocCount(String indexName, int expectedCount) throws IOException {
+        assertThat(
+            assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/" + indexName + "/_count"))).evaluate("count"),
+            equalTo(expectedCount)
+        );
+    }
+}
diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java
new file mode 100644
index 0000000000000..45844703683bb
--- /dev/null
+++
b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3BasicCredentialsRestIT.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import fixture.s3.S3HttpFixture; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3BasicCredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3BasicCredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String ACCESS_KEY = PREFIX + "access-key"; + private static final String SECRET_KEY = PREFIX + "secret-key"; + private static final String CLIENT = "basic_credentials_client"; + + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, S3HttpFixture.fixedAccessKey(ACCESS_KEY)); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("repository-s3") + .keystore("s3.client." + CLIENT + ".access_key", ACCESS_KEY) + .keystore("s3.client." + CLIENT + ".secret_key", SECRET_KEY) + .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String getBucketName() { + return BUCKET; + } + + @Override + protected String getBasePath() { + return BASE_PATH; + } + + @Override + protected String getClientName() { + return CLIENT; + } +} diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java similarity index 59% rename from modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java rename to modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java index bbd003f506ead..267ba6e6b3a13 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java @@ -13,18 +13,25 @@ import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; import java.util.Set; -public class RepositoryS3EcsClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3EcsCredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3EcsCredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String CLIENT = "ecs_credentials_client"; private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); @@ -33,33 +40,34 @@ public class RepositoryS3EcsClientYamlTestSuiteIT extends AbstractRepositoryS3Cl Set.of("/ecs_credentials_endpoint") ); - private static final S3HttpFixture s3Fixture = new S3HttpFixture( - true, - "ecs_bucket", - "ecs_base_path", - dynamicS3Credentials::isAuthorized - ); + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") - .setting("s3.client.integration_test_ecs.endpoint", s3Fixture::getAddress) + .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) .environment("AWS_CONTAINER_CREDENTIALS_FULL_URI", () -> ec2ImdsHttpFixture.getAddress() + "/ecs_credentials_endpoint") .build(); @ClassRule public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(ec2ImdsHttpFixture).around(cluster); - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(new String[] { "repository_s3/50_repository_ecs_credentials" }); + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); } - public RepositoryS3EcsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); + @Override + protected String getBucketName() { + return BUCKET; } @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + protected String getBasePath() { + return BASE_PATH; + } + + @Override + protected String getClientName() { + return CLIENT; } } diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java new file mode 100644 index 0000000000000..de9c9b6ae0695 --- /dev/null +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.repositories.s3; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.s3.DynamicS3Credentials; +import fixture.s3.S3HttpFixture; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Set; + +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3ImdsV1CredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String CLIENT = "imdsv1_credentials_client"; + + private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + dynamicS3Credentials::addValidCredentials, + Set.of() + ); + + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("repository-s3") + .setting("s3.client." + CLIENT + ".endpoint", s3Fixture::getAddress) + .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String getBucketName() { + return BUCKET; + } + + @Override + protected String getBasePath() { + return BASE_PATH; + } + + @Override + protected String getClientName() { + return CLIENT; + } +} diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioClientYamlTestSuiteIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java similarity index 50% rename from modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioClientYamlTestSuiteIT.java rename to modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java index d2b1413295ceb..93915e8491d5b 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3MinioBasicCredentialsRestIT.java @@ -9,44 +9,56 @@ package org.elasticsearch.repositories.s3; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import org.elasticsearch.test.cluster.ElasticsearchCluster; import 
org.elasticsearch.test.fixtures.minio.MinioTestContainer; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) -public class RepositoryS3MinioClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3MinioBasicCredentialsRestIT extends AbstractRepositoryS3RestTestCase { - public static MinioTestContainer minio = new MinioTestContainer(); + private static final String PREFIX = getIdentifierPrefix("RepositoryS3MinioBasicCredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String ACCESS_KEY = PREFIX + "access-key"; + private static final String SECRET_KEY = PREFIX + "secret-key"; + private static final String CLIENT = "minio_client"; + + private static final MinioTestContainer minioFixture = new MinioTestContainer(true, ACCESS_KEY, SECRET_KEY, BUCKET); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") - .keystore("s3.client.integration_test_permanent.access_key", System.getProperty("s3PermanentAccessKey")) - .keystore("s3.client.integration_test_permanent.secret_key", System.getProperty("s3PermanentSecretKey")) - .setting("s3.client.integration_test_permanent.endpoint", () -> minio.getAddress()) + .keystore("s3.client." + CLIENT + ".access_key", ACCESS_KEY) + .keystore("s3.client." + CLIENT + ".secret_key", SECRET_KEY) + .setting("s3.client." + CLIENT + ".endpoint", minioFixture::getAddress) .build(); @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(minio).around(cluster); + public static TestRule ruleChain = RuleChain.outerRule(minioFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(new String[] { "repository_s3/10_basic", "repository_s3/20_repository_permanent_credentials" }); + @Override + protected String getBucketName() { + return BUCKET; } - public RepositoryS3MinioClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); + @Override + protected String getBasePath() { + return BASE_PATH; } @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + protected String getClientName() { + return CLIENT; } } diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java new file mode 100644 index 0000000000000..84a327ee131ae --- /dev/null +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3SessionCredentialsRestIT.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import fixture.s3.S3HttpFixture; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3SessionCredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3SessionCredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String ACCESS_KEY = PREFIX + "access-key"; + private static final String SECRET_KEY = PREFIX + "secret-key"; + private static final String SESSION_TOKEN = PREFIX + "session-token"; + private static final String CLIENT = "session_credentials_client"; + + private static final S3HttpFixture s3Fixture = new S3HttpFixture( + true, + BUCKET, + BASE_PATH, + S3HttpFixture.fixedAccessKeyAndToken(ACCESS_KEY, SESSION_TOKEN) + ); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("repository-s3") + .keystore("s3.client." + CLIENT + ".access_key", ACCESS_KEY) + .keystore("s3.client." + CLIENT + ".secret_key", SECRET_KEY) + .keystore("s3.client." + CLIENT + ".session_token", SESSION_TOKEN) + .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String getBucketName() { + return BUCKET; + } + + @Override + protected String getBasePath() { + return BASE_PATH; + } + + @Override + protected String getClientName() { + return CLIENT; + } +} diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java similarity index 53% rename from modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java rename to modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java index 7c4d719485113..de80e4179ef5e 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3StsCredentialsRestIT.java @@ -13,43 +13,53 @@ import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.util.resource.Resource; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -public class RepositoryS3StsClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT { +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3StsCredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3StsCredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String CLIENT = "sts_credentials_client"; private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); - private static final S3HttpFixture s3HttpFixture = new S3HttpFixture( - true, - "sts_bucket", - "sts_base_path", - dynamicS3Credentials::isAuthorized - ); + private static final S3HttpFixture s3HttpFixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); - private static final AwsStsHttpFixture stsHttpFixture = new AwsStsHttpFixture(dynamicS3Credentials::addValidCredentials, """ + private static final String WEB_IDENTITY_TOKEN_FILE_CONTENTS = """ Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDans\ FBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFO\ - 
zTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ""");
+        zTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ""";
+
+    private static final AwsStsHttpFixture stsHttpFixture = new AwsStsHttpFixture(
+        dynamicS3Credentials::addValidCredentials,
+        WEB_IDENTITY_TOKEN_FILE_CONTENTS
+    );

     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
-        .setting("s3.client.integration_test_sts.endpoint", s3HttpFixture::getAddress)
+        .setting("s3.client." + CLIENT + ".endpoint", s3HttpFixture::getAddress)
         .systemProperty(
             "com.amazonaws.sdk.stsMetadataServiceEndpointOverride",
             () -> stsHttpFixture.getAddress() + "/assume-role-with-web-identity"
         )
-        .configFile("repository-s3/aws-web-identity-token-file", Resource.fromClasspath("aws-web-identity-token-file"))
-        .environment("AWS_WEB_IDENTITY_TOKEN_FILE", System.getProperty("awsWebIdentityTokenExternalLocation"))
-        // // The AWS STS SDK requires the role and session names to be set. We can verify that they are sent to S3S in the
-        // // S3HttpFixtureWithSTS fixture
+        .configFile(
+            S3Service.CustomWebIdentityTokenCredentialsProvider.WEB_IDENTITY_TOKEN_FILE_LOCATION,
+            Resource.fromString(WEB_IDENTITY_TOKEN_FILE_CONTENTS)
+        )
+        .environment("AWS_WEB_IDENTITY_TOKEN_FILE", S3Service.CustomWebIdentityTokenCredentialsProvider.WEB_IDENTITY_TOKEN_FILE_LOCATION)
+        // The AWS STS SDK requires the role and session names to be set. We can verify that they are sent to STS in the
+        // S3HttpFixtureWithSTS fixture
         .environment("AWS_ROLE_ARN", "arn:aws:iam::123456789012:role/FederatedWebIdentityRole")
         .environment("AWS_ROLE_SESSION_NAME", "sts-fixture-test")
         .build();

@@ -57,17 +67,23 @@ public class RepositoryS3StsClientYamlTestSuiteIT extends AbstractRepositoryS3Cl
     @ClassRule
     public static TestRule ruleChain = RuleChain.outerRule(s3HttpFixture).around(stsHttpFixture).around(cluster);

-    @ParametersFactory
-    public static Iterable<Object[]> parameters() throws Exception {
-        return createParameters(new String[] { "repository_s3/60_repository_sts_credentials" });
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+
+    @Override
+    protected String getBucketName() {
+        return BUCKET;
     }

-    public RepositoryS3StsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
-        super(testCandidate);
+    @Override
+    protected String getBasePath() {
+        return BASE_PATH;
     }

     @Override
-    protected String getTestRestCluster() {
-        return cluster.getHttpAddresses();
+    protected String getClientName() {
+        return CLIENT;
     }
 }
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java
index 5fb3254df819b..d08bd40275fec 100644
--- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java
+++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java
@@ -450,7 +450,7 @@ private static DeleteObjectsRequest bulkDelete(OperationPurpose purpose, S3BlobS
     @Override
     public void close() throws IOException {
-        this.service.close();
+        service.onBlobStoreClose();
     }

     @Override
diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java
index 1ebd6f920d518..1a66f5782fc03 100644
---
a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -303,6 +303,10 @@ private synchronized void releaseCachedClients() { IdleConnectionReaper.shutdown(); } + public void onBlobStoreClose() { + releaseCachedClients(); + } + @Override public void close() throws IOException { releaseCachedClients(); @@ -345,6 +349,8 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials private static final String STS_HOSTNAME = "https://sts.amazonaws.com"; + static final String WEB_IDENTITY_TOKEN_FILE_LOCATION = "repository-s3/aws-web-identity-token-file"; + private STSAssumeRoleWithWebIdentitySessionCredentialsProvider credentialsProvider; private AWSSecurityTokenService stsClient; private String stsRegion; @@ -363,7 +369,7 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials } // Make sure that a readable symlink to the token file exists in the plugin config directory // AWS_WEB_IDENTITY_TOKEN_FILE exists but we only use Web Identity Tokens if a corresponding symlink exists and is readable - Path webIdentityTokenFileSymlink = environment.configFile().resolve("repository-s3/aws-web-identity-token-file"); + Path webIdentityTokenFileSymlink = environment.configFile().resolve(WEB_IDENTITY_TOKEN_FILE_LOCATION); if (Files.exists(webIdentityTokenFileSymlink) == false) { LOGGER.warn( "Cannot use AWS Web Identity Tokens: AWS_WEB_IDENTITY_TOKEN_FILE is defined but no corresponding symlink exists " diff --git a/modules/repository-s3/src/test/resources/aws-web-identity-token-file b/modules/repository-s3/src/test/resources/aws-web-identity-token-file deleted file mode 100644 index 15cb29eac2ff6..0000000000000 --- a/modules/repository-s3/src/test/resources/aws-web-identity-token-file +++ /dev/null @@ -1 +0,0 @@ -Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java index a3b154b4bdfed..3d34934e54945 100644 --- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java +++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java @@ -9,8 +9,6 @@ package org.elasticsearch.repositories.s3; -import fixture.aws.imds.Ec2ImdsHttpFixture; -import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -18,7 +16,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; -import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; @@ -26,67 +23,33 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import 
java.util.Set;
-
 @ThreadLeakFilters(filters = { TestContainersThreadFilter.class })
 @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482
 public class RepositoryS3ClientYamlTestSuiteIT extends AbstractRepositoryS3ClientYamlTestSuiteIT {

-    private static final String HASHED_SEED = Integer.toString(Murmur3HashFunction.hash(System.getProperty("tests.seed")));
-    private static final String TEMPORARY_SESSION_TOKEN = "session_token-" + HASHED_SEED;
-
-    private static final S3HttpFixture s3Fixture = new S3HttpFixture();
-
-    private static final S3HttpFixture s3HttpFixtureWithSessionToken = new S3HttpFixture(
-        true,
-        "session_token_bucket",
-        "session_token_base_path_integration_tests",
-        S3HttpFixture.fixedAccessKeyAndToken(System.getProperty("s3TemporaryAccessKey"), TEMPORARY_SESSION_TOKEN)
-    );
-
-    private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();
-
-    private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(
-        dynamicS3Credentials::addValidCredentials,
-        Set.of()
-    );
+    private static final String ACCESS_KEY = "RepositoryS3ClientYamlTestSuiteIT-access-key";
+    private static final String SECRET_KEY = "RepositoryS3ClientYamlTestSuiteIT-secret-key";

-    private static final S3HttpFixture s3HttpFixtureWithImdsSessionToken = new S3HttpFixture(
+    private static final S3HttpFixture s3Fixture = new S3HttpFixture(
         true,
-        "ec2_bucket",
-        "ec2_base_path",
-        dynamicS3Credentials::isAuthorized
+        "bucket",
+        "base_path_integration_tests",
+        S3HttpFixture.fixedAccessKey(ACCESS_KEY)
     );

     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
-        .keystore("s3.client.integration_test_permanent.access_key", System.getProperty("s3PermanentAccessKey"))
-        .keystore("s3.client.integration_test_permanent.secret_key", System.getProperty("s3PermanentSecretKey"))
-        .keystore("s3.client.integration_test_temporary.access_key", System.getProperty("s3TemporaryAccessKey"))
-        .keystore("s3.client.integration_test_temporary.secret_key", System.getProperty("s3TemporarySecretKey"))
-        .keystore("s3.client.integration_test_temporary.session_token", TEMPORARY_SESSION_TOKEN)
+        .keystore("s3.client.integration_test_permanent.access_key", ACCESS_KEY)
+        .keystore("s3.client.integration_test_permanent.secret_key", SECRET_KEY)
         .setting("s3.client.integration_test_permanent.endpoint", s3Fixture::getAddress)
-        .setting("s3.client.integration_test_temporary.endpoint", s3HttpFixtureWithSessionToken::getAddress)
-        .setting("s3.client.integration_test_ec2.endpoint", s3HttpFixtureWithImdsSessionToken::getAddress)
-        .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress)
         .build();

     @ClassRule
-    public static TestRule ruleChain = RuleChain.outerRule(s3Fixture)
-        .around(s3HttpFixtureWithSessionToken)
-        .around(s3HttpFixtureWithImdsSessionToken)
-        .around(ec2ImdsHttpFixture)
-        .around(cluster);
+    public static TestRule ruleChain = RuleChain.outerRule(s3Fixture).around(cluster);

     @ParametersFactory
     public static Iterable<Object[]> parameters() throws Exception {
-        return createParameters(
-            new String[] {
-                "repository_s3/10_basic",
-                "repository_s3/20_repository_permanent_credentials",
-                "repository_s3/30_repository_temporary_credentials",
-                "repository_s3/40_repository_ec2_credentials" }
-        );
+        return createParameters(new String[] { "repository_s3/10_basic", "repository_s3/20_repository_permanent_credentials" });
     }
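+
+    // The 30_repository_temporary_credentials and 40_repository_ec2_credentials suites dropped from the
+    // list above are now covered by the Java REST tests added in this patch
+    // (RepositoryS3SessionCredentialsRestIT and RepositoryS3ImdsV1CredentialsRestIT), so only
+    // 10_basic and 20_repository_permanent_credentials remain here.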
 public RepositoryS3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
diff --git a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RegionalStsClientYamlTestSuiteIT.java b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RegionalStsClientYamlTestSuiteIT.java
index 2baba66a8a4d0..ac356083983eb 100644
--- a/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RegionalStsClientYamlTestSuiteIT.java
+++ b/modules/repository-s3/src/yamlRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RegionalStsClientYamlTestSuiteIT.java
@@ -21,10 +21,11 @@ public class RepositoryS3RegionalStsClientYamlTestSuiteIT extends AbstractReposi
     @ClassRule
     public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
         .module("repository-s3")
-        .configFile("repository-s3/aws-web-identity-token-file", Resource.fromClasspath("aws-web-identity-token-file"))
-        .environment("AWS_WEB_IDENTITY_TOKEN_FILE", System.getProperty("awsWebIdentityTokenExternalLocation"))
-        // The AWS STS SDK requires the role and session names to be set. We can verify that they are sent to S3S in the
-        // S3HttpFixtureWithSTS fixture
+        .configFile(S3Service.CustomWebIdentityTokenCredentialsProvider.WEB_IDENTITY_TOKEN_FILE_LOCATION, Resource.fromString("""
+            Atza|IQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDans\
+            FBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFO\
+            zTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ"""))
+        .environment("AWS_WEB_IDENTITY_TOKEN_FILE", S3Service.CustomWebIdentityTokenCredentialsProvider.WEB_IDENTITY_TOKEN_FILE_LOCATION)
         .environment("AWS_ROLE_ARN", "arn:aws:iam::123456789012:role/FederatedWebIdentityRole")
         .environment("AWS_ROLE_SESSION_NAME", "sts-fixture-test")
         .environment("AWS_STS_REGIONAL_ENDPOINTS", "regional")
@@ -33,6 +34,9 @@ public class RepositoryS3RegionalStsClientYamlTestSuiteIT extends AbstractReposi
     @ParametersFactory
     public static Iterable<Object[]> parameters() throws Exception {
+        // Run just the basic sanity test to make sure ES starts up and loads the S3 repository with a regional endpoint without an error.
+        // It would be great to make actual requests against a test fixture, but setting the region means using a production endpoint.
+        // See #102230 for more details.
         return createParameters(new String[] { "repository_s3/10_basic" });
     }

diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
index e88a0861ec01c..6f6fdaed8c666 100644
--- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
+++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml
@@ -10,12 +10,11 @@ setup:
         body:
           type: s3
           settings:
-            bucket: @permanent_bucket@
+            bucket: bucket
             client: integration_test_permanent
-            base_path: "@permanent_base_path@"
+            base_path: base_path_integration_tests
             canned_acl: private
             storage_class: standard
-            disable_chunked_encoding: @disable_chunked_encoding@

 # Remove the snapshots, if a previous test failed to delete them.
This is # useful for third party tests that runs the test against a real external service. @@ -40,9 +39,9 @@ setup: body: type: s3 settings: - bucket: @permanent_bucket@ + bucket: bucket client: integration_test_permanent - base_path: "@permanent_base_path@" + base_path: base_path_integration_tests endpoint: 127.0.0.1:5 canned_acl: private storage_class: standard @@ -55,9 +54,9 @@ setup: body: type: s3 settings: - bucket: @permanent_bucket@ + bucket: bucket client: integration_test_permanent - base_path: "@permanent_base_path@" + base_path: base_path_integration_tests endpoint: 127.0.0.1:5 canned_acl: private storage_class: standard @@ -106,258 +105,6 @@ setup: - match: { snapshot.include_global_state: true } - match: { snapshot.shards.failed: 0 } ---- -"Snapshot and Restore with repository-s3 using permanent credentials": - - # Get repository - - do: - snapshot.get_repository: - repository: repository_permanent - - - match: { repository_permanent.settings.bucket : @permanent_bucket@ } - - match: { repository_permanent.settings.client : "integration_test_permanent" } - - match: { repository_permanent.settings.base_path : "@permanent_base_path@" } - - match: { repository_permanent.settings.canned_acl : "private" } - - match: { repository_permanent.settings.storage_class : "standard" } - - is_false: repository_permanent.settings.access_key - - is_false: repository_permanent.settings.secret_key - - is_false: repository_permanent.settings.session_token - - # Index documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "1" - - snapshot: one - - index: - _index: docs - _id: "2" - - snapshot: one - - index: - _index: docs - _id: "3" - - snapshot: one - - - do: - count: - index: docs - - - match: {count: 3} - - # Create a first snapshot - - do: - snapshot.create: - repository: repository_permanent - snapshot: snapshot-one - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-one } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.include_global_state: true } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.status: - repository: repository_permanent - snapshot: snapshot-one - - - is_true: snapshots - - match: { snapshots.0.snapshot: snapshot-one } - - match: { snapshots.0.state : SUCCESS } - - # Index more documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "4" - - snapshot: two - - index: - _index: docs - _id: "5" - - snapshot: two - - index: - _index: docs - _id: "6" - - snapshot: two - - index: - _index: docs - _id: "7" - - snapshot: two - - - do: - count: - index: docs - - - match: {count: 7} - - # Create a second snapshot - - do: - snapshot.create: - repository: repository_permanent - snapshot: snapshot-two - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-two } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.get: - repository: repository_permanent - snapshot: snapshot-one,snapshot-two - - - is_true: snapshots - - match: { snapshots.0.state : SUCCESS } - - match: { snapshots.1.state : SUCCESS } - - # Delete the index - - do: - indices.delete: - index: docs - - # Restore the second snapshot - - do: - snapshot.restore: - repository: repository_permanent - snapshot: snapshot-two - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 7} - - # Delete the index again - - do: - indices.delete: - index: docs - - # Restore the first snapshot - - do: - snapshot.restore: - repository: 
repository_permanent - snapshot: snapshot-one - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 3} - - # Remove the snapshots - - do: - snapshot.delete: - repository: repository_permanent - snapshot: snapshot-two - - - do: - snapshot.delete: - repository: repository_permanent - snapshot: snapshot-one - ---- -"Register a repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_permanent - body: - type: s3 - settings: - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_permanent - ---- -"Register a repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_permanent - body: - type: s3 - settings: - bucket: repository_permanent - client: unknown - ---- -"Register a read-only repository with a non existing bucket": - -- do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_permanent - body: - type: s3 - settings: - readonly: true - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_permanent - ---- -"Register a read-only repository with a non existing client": - -- do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_permanent - body: - type: s3 - settings: - readonly: true - bucket: repository_permanent - client: unknown - ---- -"Get a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.get: - repository: repository_permanent - snapshot: missing - ---- -"Delete a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.delete: - repository: repository_permanent - snapshot: missing - ---- -"Restore a non existing snapshot": - - - do: - catch: /snapshot_restore_exception/ - snapshot.restore: - repository: repository_permanent - snapshot: missing - wait_for_completion: true - ---- -"Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - - do: - cluster.stats: {} - - - gte: { repositories.s3.count: 1 } - - gte: { repositories.s3.read_write: 1 } - --- teardown: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml deleted file mode 100644 index 501af980e17e3..0000000000000 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml +++ /dev/null @@ -1,278 +0,0 @@ -# Integration tests for repository-s3 - ---- -setup: - - # Register repository with temporary credentials - - do: - snapshot.create_repository: - repository: repository_temporary - body: - type: s3 - settings: - bucket: @temporary_bucket@ - client: integration_test_temporary - base_path: "@temporary_base_path@" - canned_acl: private - storage_class: standard - disable_chunked_encoding: @disable_chunked_encoding@ - ---- -"Snapshot and Restore with repository-s3 using temporary credentials": - - # Get repository - - do: - snapshot.get_repository: - repository: repository_temporary - - - match: { repository_temporary.settings.bucket : @temporary_bucket@ } - - match: { repository_temporary.settings.client : "integration_test_temporary" } - - match: { 
repository_temporary.settings.base_path : "@temporary_base_path@" } - - match: { repository_temporary.settings.canned_acl : "private" } - - match: { repository_temporary.settings.storage_class : "standard" } - - is_false: repository_temporary.settings.access_key - - is_false: repository_temporary.settings.secret_key - - is_false: repository_temporary.settings.session_token - - # Index documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "1" - - snapshot: one - - index: - _index: docs - _id: "2" - - snapshot: one - - index: - _index: docs - _id: "3" - - snapshot: one - - - do: - count: - index: docs - - - match: {count: 3} - - # Create a first snapshot - - do: - snapshot.create: - repository: repository_temporary - snapshot: snapshot-one - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-one } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.include_global_state: true } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.status: - repository: repository_temporary - snapshot: snapshot-one - - - is_true: snapshots - - match: { snapshots.0.snapshot: snapshot-one } - - match: { snapshots.0.state : SUCCESS } - - # Index more documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "4" - - snapshot: two - - index: - _index: docs - _id: "5" - - snapshot: two - - index: - _index: docs - _id: "6" - - snapshot: two - - index: - _index: docs - _id: "7" - - snapshot: two - - - do: - count: - index: docs - - - match: {count: 7} - - # Create a second snapshot - - do: - snapshot.create: - repository: repository_temporary - snapshot: snapshot-two - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-two } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.get: - repository: repository_temporary - snapshot: snapshot-one,snapshot-two - - - is_true: snapshots - - match: { snapshots.0.state : SUCCESS } - - match: { snapshots.1.state : SUCCESS } - - # Delete the index - - do: - indices.delete: - index: docs - - # Restore the second snapshot - - do: - snapshot.restore: - repository: repository_temporary - snapshot: snapshot-two - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 7} - - # Delete the index again - - do: - indices.delete: - index: docs - - # Restore the first snapshot - - do: - snapshot.restore: - repository: repository_temporary - snapshot: snapshot-one - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 3} - - # Remove the snapshots - - do: - snapshot.delete: - repository: repository_temporary - snapshot: snapshot-two - - - do: - snapshot.delete: - repository: repository_temporary - snapshot: snapshot-one - ---- -"Register a repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_temporary - body: - type: s3 - settings: - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_temporary - ---- -"Register a repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_temporary - body: - type: s3 - settings: - bucket: repository_temporary - client: unknown - ---- -"Register a read-only repository with a non existing bucket": - -- do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_temporary - body: - type: s3 - settings: 
- readonly: true - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_temporary - ---- -"Register a read-only repository with a non existing client": - -- do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_temporary - body: - type: s3 - settings: - readonly: true - bucket: repository_temporary - client: unknown - ---- -"Get a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.get: - repository: repository_temporary - snapshot: missing - ---- -"Delete a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.delete: - repository: repository_temporary - snapshot: missing - ---- -"Restore a non existing snapshot": - - - do: - catch: /snapshot_restore_exception/ - snapshot.restore: - repository: repository_temporary - snapshot: missing - wait_for_completion: true - ---- -"Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - - do: - cluster.stats: {} - - - gte: { repositories.s3.count: 1 } - - gte: { repositories.s3.read_write: 1 } - ---- -teardown: - - # Remove our repository - - do: - snapshot.delete_repository: - repository: repository_temporary diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml deleted file mode 100644 index 129f0ba5d7588..0000000000000 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml +++ /dev/null @@ -1,278 +0,0 @@ -# Integration tests for repository-s3 - ---- -setup: - - # Register repository with ec2 credentials - - do: - snapshot.create_repository: - repository: repository_ec2 - body: - type: s3 - settings: - bucket: @ec2_bucket@ - client: integration_test_ec2 - base_path: "@ec2_base_path@" - canned_acl: private - storage_class: standard - disable_chunked_encoding: @disable_chunked_encoding@ - ---- -"Snapshot and Restore with repository-s3 using ec2 credentials": - - # Get repository - - do: - snapshot.get_repository: - repository: repository_ec2 - - - match: { repository_ec2.settings.bucket : @ec2_bucket@ } - - match: { repository_ec2.settings.client : "integration_test_ec2" } - - match: { repository_ec2.settings.base_path : "@ec2_base_path@" } - - match: { repository_ec2.settings.canned_acl : "private" } - - match: { repository_ec2.settings.storage_class : "standard" } - - is_false: repository_ec2.settings.access_key - - is_false: repository_ec2.settings.secret_key - - is_false: repository_ec2.settings.session_token - - # Index documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "1" - - snapshot: one - - index: - _index: docs - _id: "2" - - snapshot: one - - index: - _index: docs - _id: "3" - - snapshot: one - - - do: - count: - index: docs - - - match: {count: 3} - - # Create a first snapshot - - do: - snapshot.create: - repository: repository_ec2 - snapshot: snapshot-one - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-one } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.include_global_state: true } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.status: - repository: repository_ec2 - snapshot: snapshot-one - - - is_true: snapshots - - match: { snapshots.0.snapshot: snapshot-one } - - match: { snapshots.0.state : 
SUCCESS } - - # Index more documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "4" - - snapshot: two - - index: - _index: docs - _id: "5" - - snapshot: two - - index: - _index: docs - _id: "6" - - snapshot: two - - index: - _index: docs - _id: "7" - - snapshot: two - - - do: - count: - index: docs - - - match: {count: 7} - - # Create a second snapshot - - do: - snapshot.create: - repository: repository_ec2 - snapshot: snapshot-two - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-two } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.get: - repository: repository_ec2 - snapshot: snapshot-one,snapshot-two - - - is_true: snapshots - - match: { snapshots.0.state : SUCCESS } - - match: { snapshots.1.state : SUCCESS } - - # Delete the index - - do: - indices.delete: - index: docs - - # Restore the second snapshot - - do: - snapshot.restore: - repository: repository_ec2 - snapshot: snapshot-two - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 7} - - # Delete the index again - - do: - indices.delete: - index: docs - - # Restore the first snapshot - - do: - snapshot.restore: - repository: repository_ec2 - snapshot: snapshot-one - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 3} - - # Remove the snapshots - - do: - snapshot.delete: - repository: repository_ec2 - snapshot: snapshot-two - - - do: - snapshot.delete: - repository: repository_ec2 - snapshot: snapshot-one - ---- -"Register a repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_ec2 - body: - type: s3 - settings: - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_temporary - ---- -"Register a repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_ec2 - body: - type: s3 - settings: - bucket: repository_ec2 - client: unknown - ---- -"Register a read-only repository with a non existing bucket": - -- do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_ec2 - body: - type: s3 - settings: - readonly: true - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_temporary - ---- -"Register a read-only repository with a non existing client": - -- do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_ec2 - body: - type: s3 - settings: - readonly: true - bucket: repository_ec2 - client: unknown - ---- -"Get a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.get: - repository: repository_ec2 - snapshot: missing - ---- -"Delete a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.delete: - repository: repository_ec2 - snapshot: missing - ---- -"Restore a non existing snapshot": - - - do: - catch: /snapshot_restore_exception/ - snapshot.restore: - repository: repository_ec2 - snapshot: missing - wait_for_completion: true - ---- -"Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - - do: - cluster.stats: {} - - - gte: { repositories.s3.count: 1 } - - gte: { repositories.s3.read_write: 1 } - ---- -teardown: - - # Remove our repository - - do: - snapshot.delete_repository: - repository: repository_ec2 diff --git 
a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml deleted file mode 100644 index de334b4b3df96..0000000000000 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml +++ /dev/null @@ -1,278 +0,0 @@ -# Integration tests for repository-s3 - ---- -setup: - - # Register repository with ecs credentials - - do: - snapshot.create_repository: - repository: repository_ecs - body: - type: s3 - settings: - bucket: @ecs_bucket@ - client: integration_test_ecs - base_path: "@ecs_base_path@" - canned_acl: private - storage_class: standard - disable_chunked_encoding: @disable_chunked_encoding@ - ---- -"Snapshot and Restore with repository-s3 using ecs credentials": - - # Get repository - - do: - snapshot.get_repository: - repository: repository_ecs - - - match: { repository_ecs.settings.bucket : @ecs_bucket@ } - - match: { repository_ecs.settings.client : "integration_test_ecs" } - - match: { repository_ecs.settings.base_path : "@ecs_base_path@" } - - match: { repository_ecs.settings.canned_acl : "private" } - - match: { repository_ecs.settings.storage_class : "standard" } - - is_false: repository_ecs.settings.access_key - - is_false: repository_ecs.settings.secret_key - - is_false: repository_ecs.settings.session_token - - # Index documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "1" - - snapshot: one - - index: - _index: docs - _id: "2" - - snapshot: one - - index: - _index: docs - _id: "3" - - snapshot: one - - - do: - count: - index: docs - - - match: {count: 3} - - # Create a first snapshot - - do: - snapshot.create: - repository: repository_ecs - snapshot: snapshot-one - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-one } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.include_global_state: true } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.status: - repository: repository_ecs - snapshot: snapshot-one - - - is_true: snapshots - - match: { snapshots.0.snapshot: snapshot-one } - - match: { snapshots.0.state : SUCCESS } - - # Index more documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: "4" - - snapshot: two - - index: - _index: docs - _id: "5" - - snapshot: two - - index: - _index: docs - _id: "6" - - snapshot: two - - index: - _index: docs - _id: "7" - - snapshot: two - - - do: - count: - index: docs - - - match: {count: 7} - - # Create a second snapshot - - do: - snapshot.create: - repository: repository_ecs - snapshot: snapshot-two - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-two } - - match: { snapshot.state : SUCCESS } - - match: { snapshot.shards.failed : 0 } - - - do: - snapshot.get: - repository: repository_ecs - snapshot: snapshot-one,snapshot-two - - - is_true: snapshots - - match: { snapshots.0.state : SUCCESS } - - match: { snapshots.1.state : SUCCESS } - - # Delete the index - - do: - indices.delete: - index: docs - - # Restore the second snapshot - - do: - snapshot.restore: - repository: repository_ecs - snapshot: snapshot-two - wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 7} - - # Delete the index again - - do: - indices.delete: - index: docs - - # Restore the first snapshot - - do: - snapshot.restore: - repository: repository_ecs - snapshot: snapshot-one - 
wait_for_completion: true - - - do: - count: - index: docs - - - match: {count: 3} - - # Remove the snapshots - - do: - snapshot.delete: - repository: repository_ecs - snapshot: snapshot-two - - - do: - snapshot.delete: - repository: repository_ecs - snapshot: snapshot-one - ---- -"Register a repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_ecs - body: - type: s3 - settings: - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_ecs - ---- -"Register a repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_ecs - body: - type: s3 - settings: - bucket: repository_ecs - client: unknown - ---- -"Register a read-only repository with a non existing bucket": - -- do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_ecs - body: - type: s3 - settings: - readonly: true - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_ecs - ---- -"Register a read-only repository with a non existing client": - -- do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_ecs - body: - type: s3 - settings: - readonly: true - bucket: repository_ecs - client: unknown - ---- -"Get a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.get: - repository: repository_ecs - snapshot: missing - ---- -"Delete a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.delete: - repository: repository_ecs - snapshot: missing - ---- -"Restore a non existing snapshot": - - - do: - catch: /snapshot_restore_exception/ - snapshot.restore: - repository: repository_ecs - snapshot: missing - wait_for_completion: true - ---- -"Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - - do: - cluster.stats: {} - - - gte: { repositories.s3.count: 1 } - - gte: { repositories.s3.read_write: 1 } - ---- -teardown: - - # Remove our repository - - do: - snapshot.delete_repository: - repository: repository_ecs diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/60_repository_sts_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/60_repository_sts_credentials.yml deleted file mode 100644 index 09a8526017960..0000000000000 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/60_repository_sts_credentials.yml +++ /dev/null @@ -1,279 +0,0 @@ -# Integration tests for repository-s3 - ---- -setup: - - # Register repository with sts credentials - - do: - snapshot.create_repository: - repository: repository_sts - body: - type: s3 - settings: - bucket: @sts_bucket@ - client: integration_test_sts - base_path: "@sts_base_path@" - canned_acl: private - storage_class: standard - disable_chunked_encoding: @disable_chunked_encoding@ - ---- -"Snapshot and Restore repository-s3 using sts credentials": - - # Get repository - - do: - snapshot.get_repository: - repository: repository_sts - - - match: { repository_sts.settings.bucket: @sts_bucket@ } - - match: { repository_sts.settings.client: "integration_test_sts" } - - match: { repository_sts.settings.base_path: "@sts_base_path@" } - - match: { repository_sts.settings.canned_acl: "private" } - - match: { 
repository_sts.settings.storage_class: "standard" } - - is_false: repository_sts.settings.access_key - - is_false: repository_sts.settings.secret_key - - is_false: repository_sts.settings.session_token - - # Index documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: 1 - - snapshot: one - - index: - _index: docs - _id: 2 - - snapshot: one - - index: - _index: docs - _id: 3 - - snapshot: one - - - do: - count: - index: docs - - - match: { count: 3 } - - # Create a first snapshot - - do: - snapshot.create: - repository: repository_sts - snapshot: snapshot-one - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-one } - - match: { snapshot.state: SUCCESS } - - match: { snapshot.include_global_state: true } - - match: { snapshot.shards.failed: 0 } - - - do: - snapshot.status: - repository: repository_sts - snapshot: snapshot-one - - - is_true: snapshots - - match: { snapshots.0.snapshot: snapshot-one } - - match: { snapshots.0.state: SUCCESS } - - # Index more documents - - do: - bulk: - refresh: true - body: - - index: - _index: docs - _id: 4 - - snapshot: two - - index: - _index: docs - _id: 5 - - snapshot: two - - index: - _index: docs - _id: 6 - - snapshot: two - - index: - _index: docs - _id: 7 - - snapshot: two - - - do: - count: - index: docs - - - match: { count: 7 } - - # Create a second snapshot - - do: - snapshot.create: - repository: repository_sts - snapshot: snapshot-two - wait_for_completion: true - - - match: { snapshot.snapshot: snapshot-two } - - match: { snapshot.state: SUCCESS } - - match: { snapshot.shards.failed: 0 } - - - do: - snapshot.get: - repository: repository_sts - snapshot: snapshot-one,snapshot-two - - - is_true: snapshots - - match: { snapshots.0.state: SUCCESS } - - match: { snapshots.1.state: SUCCESS } - - # Delete the index - - do: - indices.delete: - index: docs - - # Restore the second snapshot - - do: - snapshot.restore: - repository: repository_sts - snapshot: snapshot-two - wait_for_completion: true - - - do: - count: - index: docs - - - match: { count: 7 } - - # Delete the index again - - do: - indices.delete: - index: docs - - # Restore the first snapshot - - do: - snapshot.restore: - repository: repository_sts - snapshot: snapshot-one - wait_for_completion: true - - - do: - count: - index: docs - - - match: { count: 3 } - - # Remove the snapshots - - do: - snapshot.delete: - repository: repository_sts - snapshot: snapshot-two - - - do: - snapshot.delete: - repository: repository_sts - snapshot: snapshot-one - ---- - -"Register a repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_sts - body: - type: s3 - settings: - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_sts - ---- -"Register a repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - repository: repository_sts - body: - type: s3 - settings: - bucket: repository_sts - client: unknown - ---- -"Register a read-only repository with a non existing bucket": - - - do: - catch: /repository_verification_exception/ - snapshot.create_repository: - repository: repository_sts - body: - type: s3 - settings: - readonly: true - bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test_sts - ---- -"Register a read-only repository with a non existing client": - - - do: - catch: /illegal_argument_exception/ - snapshot.create_repository: - 
repository: repository_sts - body: - type: s3 - settings: - readonly: true - bucket: repository_sts - client: unknown - ---- -"Get a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.get: - repository: repository_sts - snapshot: missing - ---- -"Delete a non existing snapshot": - - - do: - catch: /snapshot_missing_exception/ - snapshot.delete: - repository: repository_sts - snapshot: missing - ---- -"Restore a non existing snapshot": - - - do: - catch: /snapshot_restore_exception/ - snapshot.restore: - repository: repository_sts - snapshot: missing - wait_for_completion: true - ---- -"Usage stats": - - requires: - cluster_features: - - repositories.supports_usage_stats - reason: requires this feature - - - do: - cluster.stats: {} - - - gte: { repositories.s3.count: 1 } - - gte: { repositories.s3.read_write: 1 } - ---- -teardown: - - # Remove our repository - - do: - snapshot.delete_repository: - repository: repository_sts diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index 285bbb91983cc..3ee18d71a5a79 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -18,17 +18,13 @@ public final class MinioTestContainer extends DockerEnvironmentAwareTestContaine public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2021-03-01T04-20-55Z"; private final boolean enabled; - public MinioTestContainer() { - this(true); - } - - public MinioTestContainer(boolean enabled) { + public MinioTestContainer(boolean enabled, String accessKey, String secretKey, String bucketName) { super( new ImageFromDockerfile("es-minio-testfixture").withDockerfileFromBuilder( builder -> builder.from(DOCKER_BASE_IMAGE) - .env("MINIO_ACCESS_KEY", "s3_test_access_key") - .env("MINIO_SECRET_KEY", "s3_test_secret_key") - .run("mkdir -p /minio/data/bucket") + .env("MINIO_ACCESS_KEY", accessKey) + .env("MINIO_SECRET_KEY", secretKey) + .run("mkdir -p /minio/data/" + bucketName) .cmd("server", "/minio/data") .build() ) diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java index 36f8fedcb3335..ab70f043043cc 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java @@ -33,10 +33,6 @@ public class S3HttpFixture extends ExternalResource { private final String basePath; private final BiPredicate authorizationPredicate; - public S3HttpFixture() { - this(true); - } - public S3HttpFixture(boolean enabled) { this(enabled, "bucket", "base_path_integration_tests", fixedAccessKey("s3_test_access_key")); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index 717cf96ad6a92..2dac2ee232aa5 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; 
+import java.util.Objects; import java.util.Properties; import java.util.Set; import java.util.UUID; @@ -473,6 +474,7 @@ private void createKeystore() { private void addKeystoreSettings() { spec.resolveKeystore().forEach((key, value) -> { + Objects.requireNonNull(value, "keystore setting for '" + key + "' may not be null"); String input = spec.getKeystorePassword() == null || spec.getKeystorePassword().isEmpty() ? value : spec.getKeystorePassword() + "\n" + value; diff --git a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java index 5c2b19fe75a07..53f1a9a88e10e 100644 --- a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java @@ -21,7 +21,12 @@ @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { - public static final MinioTestContainer minioFixture = new MinioTestContainer(); + public static final MinioTestContainer minioFixture = new MinioTestContainer( + true, + "s3_test_access_key", + "s3_test_secret_key", + "bucket" + ); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java index b0068bd7bfdaf..3b5edaf768057 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/analyze/MinioRepositoryAnalysisRestIT.java @@ -20,7 +20,12 @@ @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioRepositoryAnalysisRestIT extends AbstractRepositoryAnalysisRestTestCase { - public static final MinioTestContainer minioFixture = new MinioTestContainer(); + public static final MinioTestContainer minioFixture = new MinioTestContainer( + true, + "s3_test_access_key", + "s3_test_secret_key", + "bucket" + ); public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) From d729558529cafc80d705296328140b45830aa974 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Fri, 29 Nov 2024 11:00:54 +0100 Subject: [PATCH 317/386] Correct categorization analyzer in ES|QL categorize (#117695) * Correct categorization analyzer in ES|QL categorize * close categorizer if constructing analyzer fails * Rename capability CATEGORIZE_V4 * add comments --- x-pack/plugin/esql/compute/build.gradle | 4 +- .../compute/src/main/java/module-info.java | 1 + .../aggregation/blockhash/BlockHash.java | 10 +- .../blockhash/CategorizeRawBlockHash.java | 34 ++--- .../operator/HashAggregationOperator.java | 6 
+- .../GroupingAggregatorFunctionTestCase.java | 4 +- .../blockhash/CategorizeBlockHashTests.java | 76 +++++++---- .../HashAggregationOperatorTests.java | 3 +- .../src/main/resources/categorize.csv-spec | 123 ++++++++++-------- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../AbstractPhysicalOperationProviders.java | 9 +- .../planner/EsPhysicalOperationProviders.java | 4 +- .../xpack/esql/plugin/ComputeService.java | 2 +- .../xpack/esql/analysis/VerifierTests.java | 6 +- .../optimizer/LogicalPlanOptimizerTests.java | 4 +- .../planner/LocalExecutionPlannerTests.java | 4 +- .../TestPhysicalOperationProviders.java | 20 ++- 17 files changed, 199 insertions(+), 113 deletions(-) diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 609c778df5929..8e866cec3f421 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -11,11 +11,13 @@ base { dependencies { compileOnly project(':server') compileOnly project('ann') + compileOnly project(xpackModule('core')) compileOnly project(xpackModule('ml')) annotationProcessor project('gen') implementation 'com.carrotsearch:hppc:0.8.1' - testImplementation project(':test:framework') + testImplementation(project(':modules:analysis-common')) + testImplementation(project(':test:framework')) testImplementation(project(xpackModule('esql-core'))) testImplementation(project(xpackModule('core'))) testImplementation(project(xpackModule('ml'))) diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 573d9e048a4d4..1b3253694b298 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -19,6 +19,7 @@ requires org.elasticsearch.ml; requires org.elasticsearch.tdigest; requires org.elasticsearch.geo; + requires org.elasticsearch.xcore; requires hppc; exports org.elasticsearch.compute; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index ef0f3ceb112c4..ea76c3bd0a0aa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -25,6 +25,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.index.analysis.AnalysisRegistry; import java.util.Iterator; import java.util.List; @@ -169,14 +170,19 @@ public static BlockHash buildPackedValuesBlockHash(List groups, Block /** * Builds a BlockHash for the Categorize grouping function. */ - public static BlockHash buildCategorizeBlockHash(List groups, AggregatorMode aggregatorMode, BlockFactory blockFactory) { + public static BlockHash buildCategorizeBlockHash( + List groups, + AggregatorMode aggregatorMode, + BlockFactory blockFactory, + AnalysisRegistry analysisRegistry + ) { if (groups.size() != 1) { throw new IllegalArgumentException("only a single CATEGORIZE group can used"); } return aggregatorMode.isInputPartial() ? 
new CategorizedIntermediateBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial()) - : new CategorizeRawBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial()); + : new CategorizeRawBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial(), analysisRegistry); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java index 0d0a2fef2f82b..47dd7f650dffa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.aggregation.blockhash; -import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; @@ -19,13 +18,14 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; +import java.io.IOException; +import java.util.List; + /** * BlockHash implementation for {@code Categorize} grouping function. *
@@ -33,19 +33,23 @@ * <p>
  * </p>
    */ public class CategorizeRawBlockHash extends AbstractCategorizeBlockHash { + private static final CategorizationAnalyzerConfig ANALYZER_CONFIG = CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer( + List.of() + ); + private final CategorizeEvaluator evaluator; - CategorizeRawBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial) { + CategorizeRawBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial, AnalysisRegistry analysisRegistry) { super(blockFactory, channel, outputPartial); - CategorizationAnalyzer analyzer = new CategorizationAnalyzer( - // TODO: should be the same analyzer as used in Production - new CustomAnalyzer( - TokenizerFactory.newFactory("whitespace", WhitespaceTokenizer::new), - new CharFilterFactory[0], - new TokenFilterFactory[0] - ), - true - ); + + CategorizationAnalyzer analyzer; + try { + analyzer = new CategorizationAnalyzer(analysisRegistry, ANALYZER_CONFIG); + } catch (IOException e) { + categorizer.close(); + throw new RuntimeException(e); + } + this.evaluator = new CategorizeEvaluator(analyzer, categorizer, blockFactory); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index a69e8ca767014..6f8386ec08de1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -42,14 +43,15 @@ public record HashAggregationOperatorFactory( List groups, AggregatorMode aggregatorMode, List aggregators, - int maxPageSize + int maxPageSize, + AnalysisRegistry analysisRegistry ) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { if (groups.stream().anyMatch(BlockHash.GroupSpec::isCategorize)) { return new HashAggregationOperator( aggregators, - () -> BlockHash.buildCategorizeBlockHash(groups, aggregatorMode, driverContext.blockFactory()), + () -> BlockHash.buildCategorizeBlockHash(groups, aggregatorMode, driverContext.blockFactory(), analysisRegistry), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 1e97bdf5a2e79..58925a5ca36fc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -54,7 +54,6 @@ import static org.elasticsearch.compute.data.BlockTestUtils.append; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.in; /** * Shared tests for testing grouped aggregations. 
@@ -107,7 +106,8 @@ private Operator.OperatorFactory simpleWithMode( List.of(new BlockHash.GroupSpec(0, ElementType.LONG)), mode, List.of(supplier.groupingAggregatorFactory(mode)), - randomPageSize() + randomPageSize(), + null ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index dd7a87dc4a574..8a3c723557151 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -8,8 +8,10 @@ package org.elasticsearch.compute.aggregation.blockhash; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; @@ -35,7 +37,15 @@ import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.core.Releasables; - +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.scanners.StablePluginsRegistry; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.junit.Before; + +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -50,6 +60,19 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { + private AnalysisRegistry analysisRegistry; + + @Before + private void initAnalysisRegistry() throws IOException { + analysisRegistry = new AnalysisModule( + TestEnvironment.newEnvironment( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() + ), + List.of(new MachineLearning(Settings.EMPTY), new CommonAnalysisPlugin()), + new StablePluginsRegistry() + ).getAnalysisRegistry(); + } + public void testCategorizeRaw() { final Page page; boolean withNull = randomBoolean(); @@ -72,7 +95,7 @@ public void testCategorizeRaw() { page = new Page(builder.build()); } - try (BlockHash hash = new CategorizeRawBlockHash(0, blockFactory, true)) { + try (BlockHash hash = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry)) { hash.add(page, new GroupingAggregatorFunction.AddInput() { @Override public void add(int positionOffset, IntBlock groupIds) { @@ -145,8 +168,8 @@ public void testCategorizeIntermediate() { // Fill intermediatePages with the intermediate state from the raw hashes try ( - BlockHash rawHash1 = new CategorizeRawBlockHash(0, blockFactory, true); - BlockHash rawHash2 = new CategorizeRawBlockHash(0, blockFactory, true) + BlockHash rawHash1 = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry); + BlockHash rawHash2 = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry); ) { rawHash1.add(page1, new GroupingAggregatorFunction.AddInput() { @Override @@ -267,14 +290,16 @@ public void testCategorize_withDriver() { BytesRefVector.Builder 
textsBuilder = driverContext.blockFactory().newBytesRefVectorBuilder(10); LongVector.Builder countsBuilder = driverContext.blockFactory().newLongVectorBuilder(10) ) { - textsBuilder.appendBytesRef(new BytesRef("a")); - textsBuilder.appendBytesRef(new BytesRef("b")); + // Note that just using "a" or "aaa" doesn't work, because the ml_standard + // tokenizer drops numbers, including hexadecimal ones. + textsBuilder.appendBytesRef(new BytesRef("aaazz")); + textsBuilder.appendBytesRef(new BytesRef("bbbzz")); textsBuilder.appendBytesRef(new BytesRef("words words words goodbye jan")); textsBuilder.appendBytesRef(new BytesRef("words words words goodbye nik")); textsBuilder.appendBytesRef(new BytesRef("words words words goodbye tom")); textsBuilder.appendBytesRef(new BytesRef("words words words hello jan")); - textsBuilder.appendBytesRef(new BytesRef("c")); - textsBuilder.appendBytesRef(new BytesRef("d")); + textsBuilder.appendBytesRef(new BytesRef("ccczz")); + textsBuilder.appendBytesRef(new BytesRef("dddzz")); countsBuilder.appendLong(1); countsBuilder.appendLong(2); countsBuilder.appendLong(800); @@ -293,10 +318,10 @@ public void testCategorize_withDriver() { ) { textsBuilder.appendBytesRef(new BytesRef("words words words hello nik")); textsBuilder.appendBytesRef(new BytesRef("words words words hello nik")); - textsBuilder.appendBytesRef(new BytesRef("c")); + textsBuilder.appendBytesRef(new BytesRef("ccczz")); textsBuilder.appendBytesRef(new BytesRef("words words words goodbye chris")); - textsBuilder.appendBytesRef(new BytesRef("d")); - textsBuilder.appendBytesRef(new BytesRef("e")); + textsBuilder.appendBytesRef(new BytesRef("dddzz")); + textsBuilder.appendBytesRef(new BytesRef("eeezz")); countsBuilder.appendLong(9); countsBuilder.appendLong(90); countsBuilder.appendLong(3); @@ -320,7 +345,8 @@ public void testCategorize_withDriver() { new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) ), - 16 * 1024 + 16 * 1024, + analysisRegistry ).get(driverContext) ), new PageConsumerOperator(intermediateOutput::add), @@ -339,7 +365,8 @@ public void testCategorize_withDriver() { new SumLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL), new MaxLongAggregatorFunctionSupplier(List.of(1)).groupingAggregatorFactory(AggregatorMode.INITIAL) ), - 16 * 1024 + 16 * 1024, + analysisRegistry ).get(driverContext) ), new PageConsumerOperator(intermediateOutput::add), @@ -360,7 +387,8 @@ public void testCategorize_withDriver() { new SumLongAggregatorFunctionSupplier(List.of(1, 2)).groupingAggregatorFactory(AggregatorMode.FINAL), new MaxLongAggregatorFunctionSupplier(List.of(3, 4)).groupingAggregatorFactory(AggregatorMode.FINAL) ), - 16 * 1024 + 16 * 1024, + analysisRegistry ).get(driverContext) ), new PageConsumerOperator(finalOutput::add), @@ -385,15 +413,15 @@ public void testCategorize_withDriver() { sums, equalTo( Map.of( - ".*?a.*?", + ".*?aaazz.*?", 1L, - ".*?b.*?", + ".*?bbbzz.*?", 2L, - ".*?c.*?", + ".*?ccczz.*?", 33L, - ".*?d.*?", + ".*?dddzz.*?", 44L, - ".*?e.*?", + ".*?eeezz.*?", 5L, ".*?words.+?words.+?words.+?goodbye.*?", 8888L, @@ -406,15 +434,15 @@ public void testCategorize_withDriver() { maxs, equalTo( Map.of( - ".*?a.*?", + ".*?aaazz.*?", 1L, - ".*?b.*?", + ".*?bbbzz.*?", 2L, - ".*?c.*?", + ".*?ccczz.*?", 30L, - ".*?d.*?", + ".*?dddzz.*?", 40L, - ".*?e.*?", + ".*?eeezz.*?", 5L, 
".*?words.+?words.+?words.+?goodbye.*?", 8000L, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index b2f4ad594936e..953c7d1c313f1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -59,7 +59,8 @@ protected Operator.OperatorFactory simpleWithMode(AggregatorMode mode) { new SumLongAggregatorFunctionSupplier(sumChannels).groupingAggregatorFactory(mode), new MaxLongAggregatorFunctionSupplier(maxChannels).groupingAggregatorFactory(mode) ), - randomPageSize() + randomPageSize(), + null ); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index 547c430ed7518..e45b10d1aa122 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -1,5 +1,5 @@ standard aggs -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS count=COUNT(), @@ -17,7 +17,7 @@ count:long | sum:long | avg:double | count_distinct:long | category:keyw ; values aggs -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS values=MV_SORT(VALUES(message)), @@ -33,7 +33,7 @@ values:keyword | top ; mv -required_capability: categorize_v3 +required_capability: categorize_v4 FROM mv_sample_data | STATS COUNT(), SUM(event_duration) BY category=CATEGORIZE(message) @@ -48,7 +48,7 @@ COUNT():long | SUM(event_duration):long | category:keyword ; row mv -required_capability: categorize_v3 +required_capability: categorize_v4 ROW message = ["connected to a", "connected to b", "disconnected"], str = ["a", "b", "c"] | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message) @@ -60,8 +60,20 @@ COUNT():long | VALUES(str):keyword | category:keyword 1 | [a, b, c] | .*?disconnected.*? ; +skips stopwords +required_capability: categorize_v4 + +ROW message = ["Mon Tue connected to a", "Jul Aug connected to b September ", "UTC connected GMT to c UTC"] + | STATS COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +COUNT():long | category:keyword + 3 | .*?connected.+?to.*? 
+; + with multiple indices -required_capability: categorize_v3 +required_capability: categorize_v4 required_capability: union_types FROM sample_data* @@ -76,7 +88,7 @@ COUNT():long | category:keyword ; mv with many values -required_capability: categorize_v3 +required_capability: categorize_v4 FROM employees | STATS COUNT() BY category=CATEGORIZE(job_positions) @@ -93,7 +105,7 @@ COUNT():long | category:keyword ; mv with many values and SUM -required_capability: categorize_v3 +required_capability: categorize_v4 FROM employees | STATS SUM(languages) BY category=CATEGORIZE(job_positions) @@ -108,7 +120,7 @@ SUM(languages):long | category:keyword ; mv with many values and nulls and SUM -required_capability: categorize_v3 +required_capability: categorize_v4 FROM employees | STATS SUM(languages) BY category=CATEGORIZE(job_positions) @@ -122,7 +134,7 @@ SUM(languages):long | category:keyword ; mv via eval -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL message = MV_APPEND(message, "Banana") @@ -138,7 +150,7 @@ COUNT():long | category:keyword ; mv via eval const -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -152,7 +164,7 @@ COUNT():long | category:keyword ; mv via eval const without aliases -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -166,7 +178,7 @@ COUNT():long | CATEGORIZE(message):keyword ; mv const in parameter -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -179,7 +191,7 @@ COUNT():long | c:keyword ; agg alias shadowing -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS c = COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -194,7 +206,7 @@ c:keyword ; chained aggregations using categorize -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -203,13 +215,13 @@ FROM sample_data ; COUNT():long | category:keyword - 1 | .*?\.\*\?Connected\.\+\?to\.\*\?.*? - 1 | .*?\.\*\?Connection\.\+\?error\.\*\?.*? - 1 | .*?\.\*\?Disconnected\.\*\?.*? + 1 | .*?Connected.+?to.*? + 1 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? ; stats without aggs -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS BY category=CATEGORIZE(message) @@ -223,7 +235,7 @@ category:keyword ; text field -required_capability: categorize_v3 +required_capability: categorize_v4 FROM hosts | STATS COUNT() BY category=CATEGORIZE(host_group) @@ -231,14 +243,17 @@ FROM hosts ; COUNT():long | category:keyword - 2 | .*?DB.+?servers.*? 2 | .*?Gateway.+?instances.*? 5 | .*?Kubernetes.+?cluster.*? + 2 | .*?servers.*? 1 | null + +// Note: DB is removed from "DB servers", because the ml_standard +// tokenizer drops numbers, including hexadecimal ones. 
; on TO_UPPER -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(TO_UPPER(message)) @@ -252,7 +267,7 @@ COUNT():long | category:keyword ; on CONCAT -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " banana")) @@ -266,7 +281,7 @@ COUNT():long | category:keyword ; on CONCAT with unicode -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " 👍🏽😊")) @@ -274,13 +289,13 @@ FROM sample_data ; COUNT():long | category:keyword - 3 | .*?Connected.+?to.+?👍🏽😊.*? - 3 | .*?Connection.+?error.+?👍🏽😊.*? - 1 | .*?Disconnected.+?👍🏽😊.*? + 3 | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? ; on REVERSE(CONCAT()) -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(REVERSE(CONCAT(message, " 👍🏽😊"))) @@ -288,13 +303,13 @@ FROM sample_data ; COUNT():long | category:keyword - 1 | .*?😊👍🏽.+?detcennocsiD.*? - 3 | .*?😊👍🏽.+?ot.+?detcennoC.*? - 3 | .*?😊👍🏽.+?rorre.+?noitcennoC.*? + 1 | .*?detcennocsiD.*? + 3 | .*?ot.+?detcennoC.*? + 3 | .*?rorre.+?noitcennoC.*? ; and then TO_LOWER -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -309,7 +324,7 @@ COUNT():long | category:keyword ; on const empty string -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE("") @@ -321,7 +336,7 @@ COUNT():long | category:keyword ; on const empty string from eval -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL x = "" @@ -334,7 +349,7 @@ COUNT():long | category:keyword ; on null -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL x = null @@ -347,7 +362,7 @@ COUNT():long | SUM(event_duration):long | category:keyword ; on null string -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL x = null::string @@ -360,7 +375,7 @@ COUNT():long | category:keyword ; filtering out all data -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | WHERE @timestamp < "2023-10-23T00:00:00Z" @@ -372,7 +387,7 @@ COUNT():long | category:keyword ; filtering out all data with constant -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -383,7 +398,7 @@ COUNT():long | category:keyword ; drop output columns -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS count=COUNT() BY category=CATEGORIZE(message) @@ -398,7 +413,7 @@ x:integer ; category value processing -required_capability: categorize_v3 +required_capability: categorize_v4 ROW message = ["connected to a", "connected to b", "disconnected"] | STATS COUNT() BY category=CATEGORIZE(message) @@ -412,21 +427,21 @@ COUNT():long | category:keyword ; row aliases -required_capability: categorize_v3 +required_capability: categorize_v4 -ROW message = "connected to a" +ROW message = "connected to xyz" | EVAL x = message | STATS COUNT() BY category=CATEGORIZE(x) | EVAL y = category | SORT y ; -COUNT():long | category:keyword | y:keyword - 1 | .*?connected.+?to.+?a.*? | .*?connected.+?to.+?a.*? 
+COUNT():long | category:keyword | y:keyword + 1 | .*?connected.+?to.+?xyz.*? | .*?connected.+?to.+?xyz.*? ; from aliases -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL x = message @@ -442,9 +457,9 @@ COUNT():long | category:keyword | y:keyword ; row aliases with keep -required_capability: categorize_v3 +required_capability: categorize_v4 -ROW message = "connected to a" +ROW message = "connected to xyz" | EVAL x = message | KEEP x | STATS COUNT() BY category=CATEGORIZE(x) @@ -454,11 +469,11 @@ ROW message = "connected to a" ; COUNT():long | y:keyword - 1 | .*?connected.+?to.+?a.*? + 1 | .*?connected.+?to.+?xyz.*? ; from aliases with keep -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | EVAL x = message @@ -476,9 +491,9 @@ COUNT():long | y:keyword ; row rename -required_capability: categorize_v3 +required_capability: categorize_v4 -ROW message = "connected to a" +ROW message = "connected to xyz" | RENAME message as x | STATS COUNT() BY category=CATEGORIZE(x) | RENAME category as y @@ -486,11 +501,11 @@ ROW message = "connected to a" ; COUNT():long | y:keyword - 1 | .*?connected.+?to.+?a.*? + 1 | .*?connected.+?to.+?xyz.*? ; from rename -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | RENAME message as x @@ -506,7 +521,7 @@ COUNT():long | y:keyword ; row drop -required_capability: categorize_v3 +required_capability: categorize_v4 ROW message = "connected to a" | STATS c = COUNT() BY category=CATEGORIZE(message) @@ -519,7 +534,7 @@ c:long ; from drop -required_capability: categorize_v3 +required_capability: categorize_v4 FROM sample_data | STATS c = COUNT() BY category=CATEGORIZE(message) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 77a3e2840977f..373be23cdf847 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -403,7 +403,7 @@ public enum Cap { /** * Supported the text categorization function "CATEGORIZE". 
*/ - CATEGORIZE_V3(Build.current().isSnapshot()), + CATEGORIZE_V4(Build.current().isSnapshot()), /** * QSTR function diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index a7418654f6b0e..69e2d1c45aa3c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -18,6 +18,7 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.HashAggregationOperator.HashAggregationOperatorFactory; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -46,6 +47,11 @@ public abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders { private final AggregateMapper aggregateMapper = new AggregateMapper(); + private final AnalysisRegistry analysisRegistry; + + AbstractPhysicalOperationProviders(AnalysisRegistry analysisRegistry) { + this.analysisRegistry = analysisRegistry; + } @Override public final PhysicalOperation groupingPhysicalOperation( @@ -173,7 +179,8 @@ else if (aggregatorMode.isOutputPartial()) { groupSpecs.stream().map(GroupSpec::toHashGroupSpec).toList(), aggregatorMode, aggregatorFactories, - context.pageSize(aggregateExec.estimatedRowSize()) + context.pageSize(aggregateExec.estimatedRowSize()), + analysisRegistry ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 15f5b6579098d..7bf7d0e2d08eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -34,6 +34,7 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -98,7 +99,8 @@ public interface ShardContext extends org.elasticsearch.compute.lucene.ShardCont private final List shardContexts; - public EsPhysicalOperationProviders(List shardContexts) { + public EsPhysicalOperationProviders(List shardContexts, AnalysisRegistry analysisRegistry) { + super(analysisRegistry); this.shardContexts = shardContexts; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 73266551f169c..b06dd3cdb64d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -452,7 +452,7 @@ void runCompute(CancellableTask task, ComputeContext 
context, PhysicalPlan plan, context.exchangeSink(), enrichLookupService, lookupFromIndexService, - new EsPhysicalOperationProviders(contexts) + new EsPhysicalOperationProviders(contexts, searchService.getIndicesService().getAnalysis()) ); LOGGER.debug("Received physical plan:\n{}", plan); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index dd14e8dd82123..d4fca2a0a2540 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1846,7 +1846,7 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1875,7 +1875,7 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); @@ -1890,7 +1890,7 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index e98f2b88b33c9..57d0c7432f97b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1212,7 +1212,7 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testCombineProjectionWithCategorizeGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); var plan = plan(""" from test @@ -3949,7 +3949,7 @@ public void testNestedExpressionsInGroups() { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testNestedExpressionsInGroupsWithCategorize() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V3.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); var plan = optimizedPlan(""" from test diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index ff9e45a9f9233..5d8da21c6faad 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -156,7 +156,7 @@ private Configuration config() { randomZone(), randomLocale(random()), "test_user", - "test_cluser", + "test_cluster", pragmas, EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE.getDefault(null), EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE.getDefault(null), @@ -187,7 +187,7 @@ private EsPhysicalOperationProviders esPhysicalOperationProviders() throws IOExc ); } releasables.add(searcher); - return new EsPhysicalOperationProviders(shardContexts); + return new EsPhysicalOperationProviders(shardContexts, null); } private IndexReader reader() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index c811643c8daea..e91fc6e49312d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.esql.planner; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.aggregation.GroupingAggregator; @@ -28,7 +30,11 @@ import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -39,7 +45,9 @@ import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; +import org.elasticsearch.xpack.ml.MachineLearning; +import java.io.IOException; import java.util.List; import java.util.Random; import java.util.function.Function; @@ -48,6 +56,7 @@ import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; import static java.util.stream.Collectors.joining; +import static 
org.apache.lucene.tests.util.LuceneTestCase.createTempDir; import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; @@ -56,7 +65,16 @@ public class TestPhysicalOperationProviders extends AbstractPhysicalOperationPro private final Page testData; private final List columnNames; - public TestPhysicalOperationProviders(Page testData, List columnNames) { + public TestPhysicalOperationProviders(Page testData, List columnNames) throws IOException { + super( + new AnalysisModule( + TestEnvironment.newEnvironment( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() + ), + List.of(new MachineLearning(Settings.EMPTY), new CommonAnalysisPlugin()), + new StablePluginsRegistry() + ).getAnalysisRegistry() + ); this.testData = testData; this.columnNames = columnNames; } From 2226d6cbfa434206826207da46e95969fc77776c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 29 Nov 2024 11:24:28 +0100 Subject: [PATCH 318/386] Add _field_names disabling to archival index tests (#117703) Disabling the "_field_names" field in mappings was possible until 8.x and now issues a deprecation warning. We need to maintain the ability to read these mappings for archival indices so this change adds this case to one of the index mappings in tests and checks for the deprecation warning for it. --- .../test/java/org/elasticsearch/oldrepos/OldMappingsIT.java | 6 +++++- .../test/resources/org/elasticsearch/oldrepos/custom.json | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldMappingsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldMappingsIT.java index 67dbdec6b8399..95bc92d4f185a 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldMappingsIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldMappingsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.WarningsHandler; @@ -166,7 +167,10 @@ public void setupIndex() throws IOException { createRestoreRequest.addParameter("wait_for_completion", "true"); createRestoreRequest.setJsonEntity("{\"indices\":\"" + indices.stream().collect(Collectors.joining(",")) + "\"}"); createRestoreRequest.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE)); - assertOK(client().performRequest(createRestoreRequest)); + Response response = client().performRequest(createRestoreRequest); + // check deprecation warning for "_field_name" disabling + assertTrue(response.getWarnings().stream().filter(s -> s.contains("Disabling _field_names is not necessary")).count() > 0); + assertOK(response); } private Request createIndex(String indexName, String file) throws IOException { diff --git a/x-pack/qa/repository-old-versions/src/test/resources/org/elasticsearch/oldrepos/custom.json b/x-pack/qa/repository-old-versions/src/test/resources/org/elasticsearch/oldrepos/custom.json index ae52ccbcce330..ad1c6b0dc59ae 100644 --- 
a/x-pack/qa/repository-old-versions/src/test/resources/org/elasticsearch/oldrepos/custom.json +++ b/x-pack/qa/repository-old-versions/src/test/resources/org/elasticsearch/oldrepos/custom.json @@ -1,4 +1,7 @@ "_default_": { + "_field_names": { + "enabled": false + }, "properties": { "apache2": { "properties": { From b7c38a1451d13fa7402ff7055231451f43ac3ac6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 29 Nov 2024 21:54:34 +1100 Subject: [PATCH 319/386] Mute org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT test {scoring.QstrWithFieldAndScoringSortedEval} #117751 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 40d3dcf46e1b9..96631d15f374f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -225,6 +225,9 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testSupportedStream issue: https://github.com/elastic/elasticsearch/issues/117745 +- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT + method: test {scoring.QstrWithFieldAndScoringSortedEval} + issue: https://github.com/elastic/elasticsearch/issues/117751 # Examples: # From 045f6a31f994f51d87a217be60251e060132c8a1 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 29 Nov 2024 11:55:51 +0100 Subject: [PATCH 320/386] Add INDEX_REFRESH_BLOCK (#117543) This change adds a new ClusterBlockLevel called REFRESH. This level is used in a new ClusterBlock.INDEX_REFRESH_BLOCK which is automatically added to new indices that are created from empty store, with replicas, and only on serverless deployments that have a feature flag enabled. This block is also only added when all nodes of a cluster are in a recent enough transport version. If for some reason the new ClusterBlock is sent over the wire to a node with an old transport version, the REFRESH cluster block level will be removed from the set of level blocked. In the future, the REFRESH cluster block will be used: to block refreshes on shards until an unpromotable shard is started to allow skipping shards when searching Relates ES-10131 --- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/block/ClusterBlock.java | 24 +++++- .../cluster/block/ClusterBlockLevel.java | 3 +- .../cluster/metadata/IndexMetadata.java | 9 ++ .../metadata/MetadataCreateIndexService.java | 54 ++++++++++++ .../cluster/ClusterStateTests.java | 18 ++-- .../cluster/block/ClusterBlockTests.java | 49 +++++++++-- .../MetadataCreateIndexServiceTests.java | 86 ++++++++++++++++++- 8 files changed, 228 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a1315ccf66701..b38a285907937 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -211,6 +211,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_REMOVE_NODE_LEVEL_PLAN = def(8_800_00_0); public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE = def(8_801_00_0); public static final TransportVersion SOURCE_MODE_TELEMETRY = def(8_802_00_0); + public static final TransportVersion NEW_REFRESH_CLUSTER_BLOCK = def(8_803_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java index 4e47925d383c2..25c6a1ff5b67f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.block; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -21,6 +22,7 @@ import java.util.EnumSet; import java.util.Locale; import java.util.Objects; +import java.util.function.Predicate; public class ClusterBlock implements Writeable, ToXContentFragment { @@ -142,7 +144,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(id); out.writeOptionalString(uuid); out.writeString(description); - out.writeEnumSet(levels); + if (out.getTransportVersion().onOrAfter(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK)) { + out.writeEnumSet(levels); + } else { + // do not send ClusterBlockLevel.REFRESH to old nodes + out.writeEnumSet(filterLevels(levels, level -> ClusterBlockLevel.REFRESH.equals(level) == false)); + } out.writeBoolean(retryable); out.writeBoolean(disableStatePersistence); RestStatus.writeTo(out, status); @@ -185,4 +192,19 @@ public int hashCode() { public boolean isAllowReleaseResources() { return allowReleaseResources; } + + static EnumSet filterLevels(EnumSet levels, Predicate predicate) { + assert levels != null; + int size = levels.size(); + if (size == 0 || (size == 1 && predicate.test(levels.iterator().next()))) { + return levels; + } + var filteredLevels = EnumSet.noneOf(ClusterBlockLevel.class); + for (ClusterBlockLevel level : levels) { + if (predicate.test(level)) { + filteredLevels.add(level); + } + } + return filteredLevels; + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlockLevel.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlockLevel.java index f6330fb18e5e6..262044b091ac7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlockLevel.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlockLevel.java @@ -15,7 +15,8 @@ public enum ClusterBlockLevel { READ, WRITE, METADATA_READ, - METADATA_WRITE; + METADATA_WRITE, + REFRESH; public static final EnumSet ALL = EnumSet.allOf(ClusterBlockLevel.class); public static final EnumSet READ_WRITE = EnumSet.of(READ, WRITE); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 6456240c2317e..b7c1ee5fbad96 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -140,6 +140,15 @@ public class IndexMetadata implements Diffable, ToXContentFragmen RestStatus.TOO_MANY_REQUESTS, EnumSet.of(ClusterBlockLevel.WRITE) ); + public static final ClusterBlock INDEX_REFRESH_BLOCK = new ClusterBlock( + 14, + "index refresh blocked, waiting for shard(s) to be started", + true, + false, + false, + RestStatus.REQUEST_TIMEOUT, + EnumSet.of(ClusterBlockLevel.REFRESH) + ); // 'event.ingested' (part of Elastic Common Schema) range is tracked in cluster state, along with @timestamp public static final String EVENT_INGESTED_FIELD_NAME = "event.ingested"; 
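For reference, the backwards-compatibility behavior added to ClusterBlock#writeTo above can be exercised in isolation. The following is a minimal, standalone sketch (plain Java; the Level enum and FilterLevelsSketch class are hypothetical stand-ins for ClusterBlockLevel and ClusterBlock, and the helper simply mirrors the filterLevels method from the ClusterBlock diff) of how the REFRESH level is dropped before serializing to a node older than NEW_REFRESH_CLUSTER_BLOCK:

import java.util.EnumSet;
import java.util.function.Predicate;

public class FilterLevelsSketch {

    // Hypothetical stand-in for ClusterBlockLevel.
    enum Level { READ, WRITE, METADATA_READ, METADATA_WRITE, REFRESH }

    // Mirrors ClusterBlock#filterLevels from the diff above.
    static EnumSet<Level> filterLevels(EnumSet<Level> levels, Predicate<Level> predicate) {
        int size = levels.size();
        // Fast path: return the input unchanged for an empty set, or for a
        // singleton whose only element is kept by the predicate.
        if (size == 0 || (size == 1 && predicate.test(levels.iterator().next()))) {
            return levels;
        }
        EnumSet<Level> filtered = EnumSet.noneOf(Level.class);
        for (Level level : levels) {
            if (predicate.test(level)) { // the predicate decides which levels to KEEP
                filtered.add(level);
            }
        }
        return filtered;
    }

    public static void main(String[] args) {
        // What writeTo does when the receiving node predates NEW_REFRESH_CLUSTER_BLOCK:
        Predicate<Level> keepAllButRefresh = level -> Level.REFRESH.equals(level) == false;

        System.out.println(filterLevels(EnumSet.of(Level.WRITE, Level.REFRESH), keepAllButRefresh));
        // -> [WRITE]
        System.out.println(filterLevels(EnumSet.of(Level.REFRESH), keepAllButRefresh));
        // -> [] (the INDEX_REFRESH_BLOCK case: an old node never sees the REFRESH level)
    }
}

Note the fast path: when nothing needs removing (the common case, since most blocks carry no REFRESH level), the original EnumSet is returned without allocating a copy; a filtered copy is built only when at least one level is actually dropped.
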
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 1f014a526b9a6..52e4d75ac5116 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; @@ -127,6 +128,16 @@ public class MetadataCreateIndexService { public static final int MAX_INDEX_NAME_BYTES = 255; + /** + * Name of the setting used to allow blocking refreshes on newly created indices. + */ + public static final String USE_INDEX_REFRESH_BLOCK_SETTING_NAME = "stateless.indices.use_refresh_block_upon_index_creation"; + + @FunctionalInterface + interface ClusterBlocksTransformer { + void apply(ClusterBlocks.Builder clusterBlocks, IndexMetadata indexMetadata, TransportVersion minClusterTransportVersion); + } + private final Settings settings; private final ClusterService clusterService; private final IndicesService indicesService; @@ -139,6 +150,7 @@ public class MetadataCreateIndexService { private final boolean forbidPrivateIndexSettings; private final Set indexSettingProviders; private final ThreadPool threadPool; + private final ClusterBlocksTransformer blocksTransformerUponIndexCreation; public MetadataCreateIndexService( final Settings settings, @@ -166,6 +178,7 @@ public MetadataCreateIndexService( this.shardLimitValidator = shardLimitValidator; this.indexSettingProviders = indexSettingProviders.getIndexSettingProviders(); this.threadPool = threadPool; + this.blocksTransformerUponIndexCreation = createClusterBlocksTransformerForIndexCreation(settings); } /** @@ -540,8 +553,10 @@ private ClusterState applyCreateIndexWithTemporaryService( currentState, indexMetadata, metadataTransformer, + blocksTransformerUponIndexCreation, allocationService.getShardRoutingRoleStrategy() ); + assert assertHasRefreshBlock(indexMetadata, updated, updated.getMinTransportVersion()); if (request.performReroute()) { updated = allocationService.reroute(updated, "index [" + indexMetadata.getIndex().getName() + "] created", rerouteListener); } @@ -1294,6 +1309,7 @@ static ClusterState clusterStateCreateIndex( ClusterState currentState, IndexMetadata indexMetadata, BiConsumer metadataTransformer, + ClusterBlocksTransformer blocksTransformer, ShardRoutingRoleStrategy shardRoutingRoleStrategy ) { final Metadata newMetadata; @@ -1307,6 +1323,9 @@ static ClusterState clusterStateCreateIndex( var blocksBuilder = ClusterBlocks.builder().blocks(currentState.blocks()); blocksBuilder.updateBlocks(indexMetadata); + if (blocksTransformer != null) { + blocksTransformer.apply(blocksBuilder, indexMetadata, currentState.getMinTransportVersion()); + } var routingTableBuilder = RoutingTable.builder(shardRoutingRoleStrategy, currentState.routingTable()) .addAsNew(newMetadata.index(indexMetadata.getIndex().getName())); @@ -1745,4 +1764,39 @@ public static void validateStoreTypeSetting(Settings indexSettings) { ); } } + + private static boolean useRefreshBlock(Settings settings) { + return 
DiscoveryNode.isStateless(settings) && settings.getAsBoolean(USE_INDEX_REFRESH_BLOCK_SETTING_NAME, false); + } + + static ClusterBlocksTransformer createClusterBlocksTransformerForIndexCreation(Settings settings) { + if (useRefreshBlock(settings) == false) { + return (clusterBlocks, indexMetadata, minClusterTransportVersion) -> {}; + } + logger.debug("applying refresh block on index creation"); + return (clusterBlocks, indexMetadata, minClusterTransportVersion) -> { + if (applyRefreshBlock(indexMetadata, minClusterTransportVersion)) { + // Applies the INDEX_REFRESH_BLOCK to the index. This block will remain in cluster state until an unpromotable shard is + // started or a configurable delay is elapsed. + clusterBlocks.addIndexBlock(indexMetadata.getIndex().getName(), IndexMetadata.INDEX_REFRESH_BLOCK); + } + }; + } + + private static boolean applyRefreshBlock(IndexMetadata indexMetadata, TransportVersion minClusterTransportVersion) { + return 0 < indexMetadata.getNumberOfReplicas() // index has replicas + && indexMetadata.getResizeSourceIndex() == null // index is not a split/shrink index + && indexMetadata.getInSyncAllocationIds().values().stream().allMatch(Set::isEmpty) // index is a new index + && minClusterTransportVersion.onOrAfter(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK); + } + + private boolean assertHasRefreshBlock(IndexMetadata indexMetadata, ClusterState clusterState, TransportVersion minTransportVersion) { + var hasRefreshBlock = clusterState.blocks().hasIndexBlock(indexMetadata.getIndex().getName(), IndexMetadata.INDEX_REFRESH_BLOCK); + if (useRefreshBlock(settings) == false || applyRefreshBlock(indexMetadata, minTransportVersion) == false) { + assert hasRefreshBlock == false : indexMetadata.getIndex(); + } else { + assert hasRefreshBlock : indexMetadata.getIndex(); + } + return true; + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 9613086aa9f57..668aea70c23f2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -167,7 +167,8 @@ public void testToXContent() throws IOException { "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } }, @@ -180,7 +181,8 @@ public void testToXContent() throws IOException { "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } } @@ -440,7 +442,8 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } }, @@ -453,7 +456,8 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } } @@ -712,7 +716,8 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } }, @@ -725,7 +730,8 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti "read", "write", "metadata_read", - "metadata_write" + "metadata_write", + "refresh" ] } } diff --git a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java index 311f2ec36af5c..0237fff8fdda5 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java @@ -10,19 +10,22 @@ package org.elasticsearch.cluster.block; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collections; -import java.util.List; +import java.util.EnumSet; import java.util.Map; import static java.util.EnumSet.copyOf; +import static org.elasticsearch.test.TransportVersionUtils.getFirstVersion; +import static org.elasticsearch.test.TransportVersionUtils.getPreviousVersion; import static org.elasticsearch.test.TransportVersionUtils.randomVersion; +import static org.elasticsearch.test.TransportVersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; @@ -36,7 +39,7 @@ public void testSerialization() throws Exception { int iterations = randomIntBetween(5, 20); for (int i = 0; i < iterations; i++) { TransportVersion version = randomVersion(random()); - ClusterBlock clusterBlock = randomClusterBlock(); + ClusterBlock clusterBlock = randomClusterBlock(version); BytesStreamOutput out = new BytesStreamOutput(); out.setTransportVersion(version); @@ -50,13 +53,41 @@ public void testSerialization() throws Exception { } } + public void testSerializationBwc() throws Exception { + var out = new BytesStreamOutput(); + out.setTransportVersion( + randomVersionBetween(random(), getFirstVersion(), getPreviousVersion(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK)) + ); + + var clusterBlock = randomClusterBlock(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK); + clusterBlock.writeTo(out); + + var in = out.bytes().streamInput(); + in.setTransportVersion(randomVersion()); + + assertClusterBlockEquals( + new ClusterBlock( + clusterBlock.id(), + clusterBlock.uuid(), + clusterBlock.description(), + clusterBlock.retryable(), + clusterBlock.disableStatePersistence(), + clusterBlock.isAllowReleaseResources(), + clusterBlock.status(), + // ClusterBlockLevel.REFRESH should not be sent over the wire to nodes with version < NEW_REFRESH_CLUSTER_BLOCK + ClusterBlock.filterLevels(clusterBlock.levels(), level -> ClusterBlockLevel.REFRESH.equals(level) == false) + ), + new ClusterBlock(in) + ); + } + public void testToStringDanglingComma() { - final ClusterBlock clusterBlock = randomClusterBlock(); + final ClusterBlock clusterBlock = randomClusterBlock(randomVersion(random())); assertThat(clusterBlock.toString(), not(endsWith(","))); } public void testGlobalBlocksCheckedIfNoIndicesSpecified() { - ClusterBlock globalBlock = randomClusterBlock(); + ClusterBlock globalBlock = randomClusterBlock(randomVersion(random())); ClusterBlocks clusterBlocks = new ClusterBlocks(Collections.singleton(globalBlock), Map.of()); ClusterBlockException exception = clusterBlocks.indicesBlockedException(randomFrom(globalBlock.levels()), new String[0]); assertNotNull(exception); @@ -113,9 +144,13 @@ public void testGetIndexBlockWithId() { assertThat(builder.build().getIndexBlockWithId("index", randomValueOtherThan(blockId, ESTestCase::randomInt)), nullValue()); } - private static ClusterBlock randomClusterBlock() { + private static ClusterBlock 
randomClusterBlock(TransportVersion version) { final String uuid = randomBoolean() ? UUIDs.randomBase64UUID() : null; - final List levels = Arrays.asList(ClusterBlockLevel.values()); + final EnumSet levels = ClusterBlock.filterLevels( + EnumSet.allOf(ClusterBlockLevel.class), + // Filter out ClusterBlockLevel.REFRESH for versions < TransportVersions.NEW_REFRESH_CLUSTER_BLOCK + level -> ClusterBlockLevel.REFRESH.equals(level) == false || version.onOrAfter(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK) + ); return new ClusterBlock( randomInt(), uuid, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 96a74d2e23aad..1876a1f2da556 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -66,6 +67,7 @@ import org.elasticsearch.snapshots.EmptySnapshotsInfoService; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.gateway.TestGatewayAllocator; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; @@ -105,6 +107,8 @@ import static org.elasticsearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.indices.ShardLimitValidatorTests.createTestShardLimitService; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -1133,7 +1137,7 @@ public void testClusterStateCreateIndexThrowsWriteIndexValidationException() thr assertThat( expectThrows( IllegalStateException.class, - () -> clusterStateCreateIndex(currentClusterState, newIndex, null, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY) + () -> clusterStateCreateIndex(currentClusterState, newIndex, null, null, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY) ).getMessage(), startsWith("alias [alias1] has more than one write index [") ); @@ -1153,6 +1157,7 @@ public void testClusterStateCreateIndex() { currentClusterState, newIndexMetadata, null, + null, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY ); assertThat(updatedClusterState.blocks().getIndexBlockWithId("test", INDEX_READ_ONLY_BLOCK.id()), is(INDEX_READ_ONLY_BLOCK)); @@ -1198,6 +1203,7 @@ public void testClusterStateCreateIndexWithMetadataTransaction() { currentClusterState, newIndexMetadata, metadataTransformer, + null, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY ); assertTrue(updatedClusterState.metadata().findAllAliases(new String[] { "my-index" }).containsKey("my-index")); @@ -1547,6 +1553,84 @@ public void testDeprecateSimpleFS() { 
); } + public void testClusterStateCreateIndexWithClusterBlockTransformer() { + { + var emptyClusterState = ClusterState.builder(ClusterState.EMPTY_STATE).build(); + var updatedClusterState = clusterStateCreateIndex( + emptyClusterState, + IndexMetadata.builder("test") + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(randomIntBetween(1, 3)) + .build(), + null, + MetadataCreateIndexService.createClusterBlocksTransformerForIndexCreation(Settings.EMPTY), + TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY + ); + assertThat(updatedClusterState.blocks().indices(), is(anEmptyMap())); + assertThat(updatedClusterState.blocks().hasIndexBlock("test", IndexMetadata.INDEX_REFRESH_BLOCK), is(false)); + assertThat(updatedClusterState.routingTable().index("test"), is(notNullValue())); + } + { + var minTransportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + var emptyClusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .nodes(DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("_node_id")).build()) + .putCompatibilityVersions("_node_id", new CompatibilityVersions(minTransportVersion, Map.of())) + .build(); + var settings = Settings.builder() + .put(DiscoveryNode.STATELESS_ENABLED_SETTING_NAME, true) + .put(MetadataCreateIndexService.USE_INDEX_REFRESH_BLOCK_SETTING_NAME, true) + .build(); + int nbReplicas = randomIntBetween(0, 1); + var updatedClusterState = clusterStateCreateIndex( + emptyClusterState, + IndexMetadata.builder("test") + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(nbReplicas) + .build() + .withTimestampRanges(IndexLongFieldRange.UNKNOWN, IndexLongFieldRange.UNKNOWN, minTransportVersion), + null, + MetadataCreateIndexService.createClusterBlocksTransformerForIndexCreation(settings), + TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY + ); + + var expectRefreshBlock = 0 < nbReplicas && minTransportVersion.onOrAfter(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK); + assertThat(updatedClusterState.blocks().indices(), is(aMapWithSize(expectRefreshBlock ? 
1 : 0))); + assertThat(updatedClusterState.blocks().hasIndexBlock("test", IndexMetadata.INDEX_REFRESH_BLOCK), is(expectRefreshBlock)); + assertThat(updatedClusterState.routingTable().index("test"), is(notNullValue())); + } + } + + public void testCreateClusterBlocksTransformerForIndexCreation() { + boolean isStateless = randomBoolean(); + boolean useRefreshBlock = randomBoolean(); + var minTransportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + var applier = MetadataCreateIndexService.createClusterBlocksTransformerForIndexCreation( + Settings.builder() + .put(DiscoveryNode.STATELESS_ENABLED_SETTING_NAME, isStateless) + .put(MetadataCreateIndexService.USE_INDEX_REFRESH_BLOCK_SETTING_NAME, useRefreshBlock) + .build() + ); + assertThat(applier, notNullValue()); + + var blocks = ClusterBlocks.builder().blocks(ClusterState.EMPTY_STATE.blocks()); + applier.apply( + blocks, + IndexMetadata.builder("test") + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(randomIntBetween(1, 3)) + .build(), + minTransportVersion + ); + assertThat( + blocks.hasIndexBlock("test", IndexMetadata.INDEX_REFRESH_BLOCK), + is(isStateless && useRefreshBlock && minTransportVersion.onOrAfter(TransportVersions.NEW_REFRESH_CLUSTER_BLOCK)) + ); + } + private IndexTemplateMetadata addMatchingTemplate(Consumer configurator) { IndexTemplateMetadata.Builder builder = templateMetadataBuilder("template1", "te*"); configurator.accept(builder); From ad83d9b35ddc01229a5b2b5de21b122f9d1b2106 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 29 Nov 2024 14:50:01 +0200 Subject: [PATCH 321/386] Updating retriever-examples documentation to run validation tests on the provided snippets (#116643) --- docs/reference/search/rrf.asciidoc | 98 +- .../retrievers-examples.asciidoc | 1270 ++++++++++++++--- 2 files changed, 1149 insertions(+), 219 deletions(-) diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index edd3b67e3de04..a942c0162a80a 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -105,7 +105,7 @@ The `rrf` retriever does not currently support: * <> Using unsupported features as part of a search with an `rrf` retriever results in an exception. -+ + IMPORTANT: It is best to avoid providing a <> as part of the request, as RRF creates one internally that is shared by all sub-retrievers to ensure consistent results. @@ -703,3 +703,99 @@ So for the same params as above, we would now have: * `from=0, size=2` would return [`1`, `5`] with ranks `[1, 2]` * `from=2, size=2` would return an empty result set as it would fall outside the available `rank_window_size` results. + +==== Aggregations in RRF + +The `rrf` retriever supports aggregations from all specified sub-retrievers. 
Important notes about aggregations: + +* They operate on the complete result set from all sub-retrievers +* They are not limited by the `rank_window_size` parameter +* They process the union of all matching documents + +For example, consider the following document set: +[source,js] +---- +{ + "_id": 1, "termA": "foo", + "_id": 2, "termA": "foo", "termB": "bar", + "_id": 3, "termA": "aardvark", "termB": "bar", + "_id": 4, "termA": "foo", "termB": "bar" +} +---- +// NOTCONSOLE + +Perform a term aggregation on the `termA` field using an `rrf` retriever: +[source,js] +---- +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "term": { + "termB": "bar" + } + } + } + }, + { + "standard": { + "query": { + "match_all": { } + } + } + } + ], + "rank_window_size": 1 + } + }, + "size": 1, + "aggs": { + "termA_agg": { + "terms": { + "field": "termA" + } + } + } +} +---- +// NOTCONSOLE + +The aggregation results will include *all* matching documents, regardless of `rank_window_size`. +[source, js] +---- +{ + "foo": 3, + "aardvark": 1 +} + +---- +// NOTCONSOLE + +==== Highlighting in RRF + +Using the `rrf` retriever, you can add <> to show relevant text snippets in your search results. Highlighted snippets are computed based +on the matching text queries defined on the sub-retrievers. + +IMPORTANT: Highlighting on vector fields, using either the `knn` retriever or a `knn` query, is not supported. + +A more specific example of highlighting in RRF can also be found in the <> page. + +==== Inner hits in RRF + +The `rrf` retriever supports <> functionality, allowing you to retrieve +related nested or parent/child documents alongside your main search results. Inner hits can be +specified as part of any nested sub-retriever and will be propagated to the top-level parent +retriever. Note that the inner hit computation will take place only at end of `rrf` retriever's +evaluation on the top matching documents, and not as part of the query execution of the nested +sub-retrievers. + +[IMPORTANT] +==== +When defining multiple `inner_hits` sections across sub-retrievers: + +* Each `inner_hits` section must have a unique name +* Names must be unique across all sub-retrievers in the search request +==== diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc index 8cd1a4bf5ce98..ad1cc32dcee01 100644 --- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc @@ -1,31 +1,16 @@ [[retrievers-examples]] -=== Retrievers examples Learn how to combine different retrievers in these hands-on examples. -To demonstrate the full functionality of retrievers, these examples require access to a <> set up using the <>. + +=== Retrievers examples [discrete] [[retrievers-examples-setup]] ==== Add example data -To begin with, we'll set up the necessary services and have them in place for later use. - -[source,js] ----- -// Setup rerank task stored as `my-rerank-model` -PUT _inference/rerank/my-rerank-model -{ - "service": "cohere", - "service_settings": { - "model_id": "rerank-english-v3.0", - "api_key": "{{COHERE_API_KEY}}" - } -} ----- -//NOTCONSOLE +To begin with, lets create the `retrievers_example` index, and add some documents to it. -Now that we have our reranking service in place, lets create the `retrievers_example` index, and add some documents to it. 
-[source,js] +[source,console] ---- PUT retrievers_example { @@ -49,11 +34,7 @@ PUT retrievers_example } } } ----- -//NOTCONSOLE -[source,js] ----- POST /retrievers_example/_doc/1 { "vector": [0.23, 0.67, 0.89], @@ -94,10 +75,12 @@ POST /retrievers_example/_doc/5 "topic": ["documentation", "observability", "elastic"] } +POST /retrievers_example/_refresh + ---- -//NOTCONSOLE +// TESTSETUP -Now that we also have our documents in place, let's try to run some queries using retrievers. +Now that we have our documents in place, let's try to run some queries using retrievers. [discrete] [[retrievers-examples-combining-standard-knn-retrievers-with-rrf]] @@ -112,170 +95,272 @@ To implement this in the retriever framework, we start with the top-level elemen retriever. This retriever operates on top of two other retrievers: a `knn` retriever and a `standard` retriever. Our query structure would look like this: -[source,js] +[source,console] ---- GET /retrievers_example/_search { - "retriever":{ - "rrf": { - "retrievers":[ - { - "standard":{ - "query":{ - "query_string":{ - "query": "(information retrieval) OR (artificial intelligence)", - "default_field": "text" - } - } - } - }, - { - "knn": { - "field": "vector", - "query_vector": [ - 0.23, - 0.67, - 0.89 - ], - "k": 3, - "num_candidates": 5 - } - } - ], - "rank_window_size": 10, - "rank_constant": 1 - } - }, - "_source": ["text", "topic"] + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false +} +---- +// TEST + +This returns the following response based on the final rrf score for each result. + +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } } ---- -//NOTCONSOLE +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +============== [discrete] [[retrievers-examples-collapsing-retriever-results]] ==== Example: Grouping results by year with `collapse` In our result set, we have many documents with the same `year` value. We can clean this -up using the `collapse` parameter with our retriever. This enables grouping results by -any field and returns only the highest-scoring document from each group. In this example +up using the `collapse` parameter with our retriever. This, as with the standard <> feature, +enables grouping results by any field and returns only the highest-scoring document from each group. In this example we'll collapse our results based on the `year` field. 
-[source,js] +[source,console] ---- GET /retrievers_example/_search { - "retriever":{ - "rrf": { - "retrievers":[ - { - "standard":{ - "query":{ - "query_string":{ - "query": "(information retrieval) OR (artificial intelligence)", - "default_field": "text" - } - } - } - }, - { - "knn": { - "field": "vector", - "query_vector": [ - 0.23, - 0.67, - 0.89 - ], - "k": 3, - "num_candidates": 5 - } - } - ], - "rank_window_size": 10, - "rank_constant": 1 - } - }, - "collapse": { - "field": "year", - "inner_hits": { - "name": "topic related documents", - "_source": ["text", "year"] - } - }, - "_source": ["text", "topic"] + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "collapse": { + "field": "year", + "inner_hits": { + "name": "topic related documents", + "_source": [ + "year" + ] + } + }, + "_source": false } ---- -//NOTCONSOLE +// TEST[continued] -[discrete] -[[retrievers-examples-text-similarity-reranker-on-top-of-rrf]] -==== Example: Rerank results of an RRF retriever +This returns the following response with collapsed results. -Previously, we used a `text_similarity_reranker` retriever within an `rrf` retriever. -Because retrievers support full composability, we can also rerank the results of an -`rrf` retriever. Let's apply this to our first example. - -[source,js] +.Example response +[%collapsible] +============== +[source,console-result] ---- -GET retrievers_example/_search { - "retriever": { - "text_similarity_reranker": { - "retriever": { - "rrf": { - "retrievers": [ - { - "standard":{ - "query":{ - "query_string":{ - "query": "(information retrieval) OR (artificial intelligence)", - "default_field": "text" - } - } - } - }, - { - "knn": { - "field": "vector", - "query_vector": [ - 0.23, - 0.67, - 0.89 - ], - "k": 3, - "num_candidates": 5 - } - } - ], - "rank_window_size": 10, - "rank_constant": 1 - } - }, - "field": "text", - "inference_id": "my-rerank-model", - "inference_text": "What are the state of the art applications of AI in information retrieval?" 
- } - }, - "_source": ["text", "topic"] + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "fields": { + "year": [ + 2024 + ] + }, + "inner_hits": { + "topic related documents": { + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "_source": { + "year": 2024 + } + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25, + "_source": { + "year": 2024 + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "fields": { + "year": [ + 2023 + ] + }, + "inner_hits": { + "topic related documents": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "_source": { + "year": 2023 + } + } + ] + } + } + } + } + ] + } } - ---- -//NOTCONSOLE +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +============== [discrete] -[[retrievers-examples-rrf-ranking-on-text-similarity-reranker-results]] -==== Example: RRF with semantic reranker +[[retrievers-examples-highlighting-retriever-results]] +==== Example: Highlighting results based on nested sub-retrievers -For this example, we'll replace our semantic query with the `my-rerank-model` -reranker we previously configured. Since this is a reranker, it needs an initial pool of -documents to work with. In this case, we'll filter for documents about `ai` topics. +Highlighting is now also available for nested sub-retrievers matches. For example, consider the same +`rrf` retriever as above, with a `knn` and `standard` retriever as its sub-retrievers. We can specify a `highlight` +section, as defined in <> documentation, and compute highlights for the top results. -[source,js] +[source,console] ---- GET /retrievers_example/_search { "retriever": { "rrf": { "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, { "knn": { "field": "vector", @@ -287,21 +372,221 @@ GET /retrievers_example/_search "k": 3, "num_candidates": 5 } - }, + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "highlight": { + "fields": { + "text": { + "fragment_size": 150, + "number_of_fragments": 3 + } + } + }, + "_source": false +} +---- +// TEST[continued] + +This would highlight the `text` field, based on the matches produced by the `standard` retriever. The highlighted snippets +would then be included in the response as usual, i.e. under each search hit. 
+ +.Example response +[%collapsible] +============== +[source,console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.8333334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.8333334, + "highlight": { + "text": [ + "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences" + ] + } + }, + { + "_index": "retrievers_example", + "_id": "2", + "_score": 0.8333334, + "highlight": { + "text": [ + "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved" + ] + } + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.25 + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +============== + +[discrete] +[[retrievers-examples-inner-hits-retriever-results]] +==== Example: Computing inner hits from nested sub-retrievers + +We can also define `inner_hits` to be computed on any of the sub-retrievers, and propagate those computations to the top +level compound retriever. For example, let's create a new index with a `knn` field, nested under the `nested_field` field, +and index a couple of documents. + +[source,console] +---- +PUT retrievers_example_nested +{ + "mappings": { + "properties": { + "nested_field": { + "type": "nested", + "properties": { + "paragraph_id": { + "type": "keyword" + }, + "nested_vector": { + "type": "dense_vector", + "dims": 3, + "similarity": "l2_norm", + "index": true + } + } + }, + "topic": { + "type": "keyword" + } + } + } +} + +POST /retrievers_example_nested/_doc/1 +{ + "nested_field": [ + { + "paragraph_id": "1a", + "nested_vector": [ + -1.12, + -0.59, + 0.78 + ] + }, + { + "paragraph_id": "1b", + "nested_vector": [ + -0.12, + 1.56, + 0.42 + ] + }, + { + "paragraph_id": "1c", + "nested_vector": [ + 1, + -1, + 0 + ] + } + ], + "topic": [ + "ai" + ] +} + +POST /retrievers_example_nested/_doc/2 +{ + "nested_field": [ + { + "paragraph_id": "2a", + "nested_vector": [ + 0.23, + 1.24, + 0.65 + ] + } + ], + "topic": [ + "information_retrieval" + ] +} + +POST /retrievers_example_nested/_doc/3 +{ + "topic": [ + "ai" + ] +} + +POST /retrievers_example_nested/_refresh +---- +// TEST[continued] + +Now we can run an `rrf` retriever query and also compute <> for the `nested_field.nested_vector` +field, based on the `knn` query specified. + +[source,console] +---- +GET /retrievers_example_nested/_search +{ + "retriever": { + "rrf": { + "retrievers": [ { - "text_similarity_reranker": { - "retriever": { - "standard": { + "standard": { + "query": { + "nested": { + "path": "nested_field", + "inner_hits": { + "name": "nested_vector", + "_source": false, + "fields": [ + "nested_field.paragraph_id" + ] + }, "query": { - "term": { - "topic": "ai" + "knn": { + "field": "nested_field.nested_vector", + "query_vector": [ + 1, + 0, + 0.5 + ], + "k": 10 } } } - }, - "field": "text", - "inference_id": "my-rerank-model", - "inference_text": "Can I use generative AI to identify user intent and improve search relevance?" 
+ } + } + }, + { + "standard": { + "query": { + "term": { + "topic": "ai" + } + } } } ], @@ -310,64 +595,184 @@ GET /retrievers_example/_search } }, "_source": [ - "text", "topic" ] } ---- -//NOTCONSOLE - -[discrete] -[[retrievers-examples-chaining-text-similarity-reranker-retrievers]] -==== Example: Chaining multiple semantic rerankers +// TEST[continued] -Full composability means we can chain together multiple retrievers of the same type. For instance, imagine we have a computationally expensive reranker that's specialized for AI content. We can rerank the results of a `text_similarity_reranker` using another `text_similarity_reranker` retriever. Each reranker can operate on different fields and/or use different inference services. +This would propagate the `inner_hits` defined for the `knn` query to the `rrf` retriever, and compute inner hits for `rrf`'s top results. -[source,js] +.Example response +[%collapsible] +============== +[source,console-result] ---- -GET retrievers_example/_search { - "retriever": { - "text_similarity_reranker": { - "retriever": { - "text_similarity_reranker": { - "retriever": { - "knn": { - "field": "vector", - "query_vector": [ - 0.23, - 0.67, - 0.89 - ], - "k": 3, - "num_candidates": 5 - } - }, - "rank_window_size": 100, - "field": "text", - "inference_id": "my-rerank-model", - "inference_text": "What are the state of the art applications of AI in information retrieval?" - } - }, - "rank_window_size": 10, - "field": "text", - "inference_id": "my-other-more-expensive-rerank-model", - "inference_text": "Applications of Large Language Models in technology and their impact on user satisfaction" - } - }, - "_source": [ - "text", - "topic" - ] + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "1", + "_score": 1.0, + "_source": { + "topic": [ + "ai" + ] + }, + "inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 3, + "relation": "eq" + }, + "max_score": 0.44353113, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 2 + }, + "_score": 0.44353113, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1c" + ] + } + ] + } + }, + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 1 + }, + "_score": 0.26567122, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1b" + ] + } + ] + } + }, + { + "_index": "retrievers_example_nested", + "_id": "1", + "_nested": { + "field": "nested_field", + "offset": 0 + }, + "_score": 0.18478848, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "1a" + ] + } + ] + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example_nested", + "_id": "2", + "_score": 0.33333334, + "_source": { + "topic": [ + "information_retrieval" + ] + }, + "inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.32002488, + "hits": [ + { + "_index": "retrievers_example_nested", + "_id": "2", + "_nested": { + "field": "nested_field", + "offset": 0 + }, + "_score": 0.32002488, + "fields": { + "nested_field": [ + { + "paragraph_id": [ + "2a" + ] + } + ] + } + } + ] + } + } + } + }, + { + "_index": "retrievers_example_nested", + "_id": "3", + "_score": 0.33333334, + "_source": { + "topic": [ + "ai" + ] + }, + 
"inner_hits": { + "nested_vector": { + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } + } + } + } + ] + } } ---- -//NOTCONSOLE +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +============== - -Note that our example applies two reranking steps. First, we rerank the top 100 -documents from the `knn` search using the `my-rerank-model` reranker. Then we -pick the top 10 results and rerank them using the more fine-grained -`my-other-more-expensive-rerank-model`. +Note: if using more than one `inner_hits` we need to provide custom names for each `inner_hits` so that they +are unique across all retrievers within the request. [discrete] [[retrievers-examples-rrf-and-aggregations]] @@ -380,7 +785,7 @@ the `terms` aggregation for the `topic` field will include all results, not just from the 2 nested retrievers, i.e. all documents whose `year` field is greater than 2023, and whose `topic` field matches the term `elastic`. -[source,js] +[source,console] ---- GET retrievers_example/_search { @@ -412,10 +817,7 @@ GET retrievers_example/_search "rank_constant": 1 } }, - "_source": [ - "text", - "topic" - ], + "_source": false, "aggs": { "topics": { "terms": { @@ -425,4 +827,436 @@ GET retrievers_example/_search } } ---- -//NOTCONSOLE +// TEST[continued] + +.Example response +[%collapsible] +============== +[source, console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 4, + "relation": "eq" + }, + "max_score": 0.5833334, + "hits": [ + { + "_index": "retrievers_example", + "_id": "5", + "_score": 0.5833334 + }, + { + "_index": "retrievers_example", + "_id": "1", + "_score": 0.5 + }, + { + "_index": "retrievers_example", + "_id": "4", + "_score": 0.5 + }, + { + "_index": "retrievers_example", + "_id": "3", + "_score": 0.33333334 + } + ] + }, + "aggregations": { + "topics": { + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, + "buckets": [ + { + "key": "ai", + "doc_count": 3 + }, + { + "key": "elastic", + "doc_count": 2 + }, + { + "key": "assistant", + "doc_count": 1 + }, + { + "key": "documentation", + "doc_count": 1 + }, + { + "key": "information_retrieval", + "doc_count": 1 + }, + { + "key": "llm", + "doc_count": 1 + }, + { + "key": "observability", + "doc_count": 1 + }, + { + "key": "security", + "doc_count": 1 + } + ] + } + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +============== + +[discrete] +[[retrievers-examples-explain-multiple-rrf]] +==== Example: Explainability with multiple retrievers + +By adding `explain: true` to the request, each retriever will now provide a detailed explanation of all the steps +and calculations required to compute the final score. Composability is fully supported in the context of `explain`, and +each retriever will provide its own explanation, as shown in the example below. 
+ +[source,console] +---- +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "term": { + "topic": "elastic" + } + } + } + }, + { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false, + "size": 1, + "explain": true +} +---- +// TEST[continued] + +The output of which, albeit a bit verbose, will provide all the necessary info to assist in debugging and reason with ranking. + +.Example response +[%collapsible] +============== +[source, console-result] +---- +{ + "took": 42, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 5, + "relation": "eq" + }, + "max_score": 0.5, + "hits": [ + { + "_shard": "[retrievers_example][0]", + "_node": "jnrdZFKS3abUgWVsVdj2Vg", + "_index": "retrievers_example", + "_id": "1", + "_score": 0.5, + "_explanation": { + "value": 0.5, + "description": "rrf score: [0.5] computed for initial ranks [0, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query", + "details": [ + { + "value": 0.0, + "description": "rrf score: [0], result not found in query at index [0]", + "details": [] + }, + { + "value": 1, + "description": "rrf score: [0.5], for rank [1] in query at index [1] computed as [1 / (1 + 1)], for matching query with score", + "details": [ + { + "value": 0.8333334, + "description": "rrf score: [0.8333334] computed for initial ranks [2, 1] with rankConstant: [1] as sum of [1 / (rank + rankConstant)] for each query", + "details": [ + { + "value": 2, + "description": "rrf score: [0.33333334], for rank [2] in query at index [0] computed as [1 / (2 + 1)], for matching query with score", + "details": [ + { + "value": 2.8129659, + "description": "sum of:", + "details": [ + { + "value": 1.4064829, + "description": "weight(text:information in 0) [PerFieldSimilarity], result of:", + "details": [ + *** + ] + }, + { + "value": 1.4064829, + "description": "weight(text:retrieval in 0) [PerFieldSimilarity], result of:", + "details": [ + *** + ] + } + ] + } + ] + }, + { + "value": 1, + "description": "rrf score: [0.5], for rank [1] in query at index [1] computed as [1 / (1 + 1)], for matching query with score", + "details": [ + { + "value": 1, + "description": "doc [0] with an original score of [1.0] is at rank [1] from the following source queries.", + "details": [ + { + "value": 1.0, + "description": "found vector with calculated similarity: 1.0", + "details": [] + } + ] + } + ] + } + ] + } + ] + } + ] + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took": 42/"took": $body.took/] +// TESTRESPONSE[s/\.\.\./$body.hits.hits.0._explanation.details.1.details.0.details.0.details.0.details.0.details.0/] +// TESTRESPONSE[s/\*\*\*/$body.hits.hits.0._explanation.details.1.details.0.details.0.details.0.details.1.details.0/] +// TESTRESPONSE[s/jnrdZFKS3abUgWVsVdj2Vg/$body.hits.hits.0._node/] +============== + +[discrete] +[[retrievers-examples-text-similarity-reranker-on-top-of-rrf]] +==== Example: Rerank results of an RRF retriever + +To demonstrate the full functionality of retrievers, the following examples also 
require access to a <> set up using the <>. + +In this example we'll set up a reranking service and use it with the `text_similarity_reranker` retriever to rerank our top results. + +[source,console] +---- +PUT _inference/rerank/my-rerank-model +{ + "service": "cohere", + "service_settings": { + "model_id": "rerank-english-v3.0", + "api_key": "{{COHERE_API_KEY}}" + } +} +---- +// TEST[skip: no_access_to_ml] + +Let's start by reranking the results of the `rrf` retriever in our previous example. + +[source,console] +---- +GET retrievers_example/_search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "query_string": { + "query": "(information retrieval) OR (artificial intelligence)", + "default_field": "text" + } + } + } + }, + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "What are the state of the art applications of AI in information retrieval?" + } + }, + "_source": false +} + +---- +// TEST[skip: no_access_to_ml] + +[discrete] +[[retrievers-examples-rrf-ranking-on-text-similarity-reranker-results]] +==== Example: RRF with semantic reranker + +For this example, we'll replace the rrf's `standard` retriever with the `text_similarity_reranker` retriever, using the +`my-rerank-model` reranker we previously configured. Since this is a reranker, it needs an initial pool of +documents to work with. In this case, we'll rerank the top `rank_window_size` documents matching the `ai` topic. + +[source,console] +---- +GET /retrievers_example/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + { + "text_similarity_reranker": { + "retriever": { + "standard": { + "query": { + "term": { + "topic": "ai" + } + } + } + }, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "Can I use generative AI to identify user intent and improve search relevance?" + } + } + ], + "rank_window_size": 10, + "rank_constant": 1 + } + }, + "_source": false +} +---- +// TEST[skip: no_access_to_ml] + +[discrete] +[[retrievers-examples-chaining-text-similarity-reranker-retrievers]] +==== Example: Chaining multiple semantic rerankers + +Full composability means we can chain together multiple retrievers of the same type. For instance, +imagine we have a computationally expensive reranker that's specialized for AI content. We can rerank the results of a `text_similarity_reranker` using another `text_similarity_reranker` retriever. Each reranker can operate on different fields and/or use different inference services. + +[source,console] +---- +GET retrievers_example/_search +{ + "retriever": { + "text_similarity_reranker": { + "retriever": { + "text_similarity_reranker": { + "retriever": { + "knn": { + "field": "vector", + "query_vector": [ + 0.23, + 0.67, + 0.89 + ], + "k": 3, + "num_candidates": 5 + } + }, + "rank_window_size": 100, + "field": "text", + "inference_id": "my-rerank-model", + "inference_text": "What are the state of the art applications of AI in information retrieval?" 
+ } + }, + "rank_window_size": 10, + "field": "text", + "inference_id": "my-other-more-expensive-rerank-model", + "inference_text": "Applications of Large Language Models in technology and their impact on user satisfaction" + } + }, + "_source": false +} +---- +// TEST[skip: no_access_to_ml] + +Note that our example applies two reranking steps. First, we rerank the top 100 +documents from the `knn` search using the `my-rerank-model` reranker. Then we +pick the top 10 results and rerank them using the more fine-grained +`my-other-more-expensive-rerank-model`. From 6417e0912f2876c00f4e3b970af84875f23cd943 Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Fri, 29 Nov 2024 14:53:20 +0200 Subject: [PATCH 322/386] CrossClusterIT testCancel failure (#117750) Investigate and fix test failure --- docs/changelog/117750.yaml | 6 ++++++ .../java/org/elasticsearch/search/ccs/CrossClusterIT.java | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/117750.yaml diff --git a/docs/changelog/117750.yaml b/docs/changelog/117750.yaml new file mode 100644 index 0000000000000..3ba3f1693f4df --- /dev/null +++ b/docs/changelog/117750.yaml @@ -0,0 +1,6 @@ +pr: 117750 +summary: '`CrossClusterIT` `testCancel` failure' +area: Search +type: bug +issues: + - 108061 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java index 5d2d5c917415a..cb4d0681cdb23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java @@ -63,6 +63,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -189,7 +190,6 @@ public void testProxyConnectionDisconnect() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108061") public void testCancel() throws Exception { assertAcked(client(LOCAL_CLUSTER).admin().indices().prepareCreate("demo")); indexDocs(client(LOCAL_CLUSTER), "demo"); @@ -307,7 +307,7 @@ public void testCancel() throws Exception { } }); - RuntimeException e = expectThrows(RuntimeException.class, () -> queryFuture.result()); + ExecutionException e = expectThrows(ExecutionException.class, () -> queryFuture.result()); assertNotNull(e); assertNotNull(e.getCause()); Throwable t = ExceptionsHelper.unwrap(e, TaskCancelledException.class); From e19f2b7fbb908228a9b53821e275b8ccb58e7029 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Fri, 29 Nov 2024 17:22:37 +0400 Subject: [PATCH 323/386] Remove unsupported async_search parameters from rest-api-spec (#117626) --- .../rest-api-spec/api/async_search.submit.json | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json index a7a7ebe838eab..3de0dec85f547 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/async_search.submit.json @@ -43,11 +43,6 @@ "description":"Control whether the response should be stored in the cluster if it completed within the provided 
[wait_for_completion] time (default: false)", "default":false }, - "keep_alive": { - "type": "time", - "description": "Update the time interval in which the results (partial or final) for this search will be available", - "default": "5d" - }, "batched_reduce_size":{ "type":"number", "description":"The number of shard results that should be reduced at once on the coordinating node. This value should be used as the granularity at which progress results will be made available.", @@ -131,11 +126,6 @@ "type":"string", "description":"Specify the node or shard the operation should be performed on (default: random)" }, - "pre_filter_shard_size":{ - "type":"number", - "default": 1, - "description":"Cannot be changed: this is to enforce the execution of a pre-filter roundtrip to retrieve statistics from each shard so that the ones that surely don’t hold any document matching the query get skipped." - }, "rest_total_hits_as_int":{ "type":"boolean", "description":"Indicates whether hits.total should be rendered as an integer or an object in the rest search response", From 60ce74a7870a9e050ddac64900c3b35682e8e355 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Fri, 29 Nov 2024 15:38:12 +0100 Subject: [PATCH 324/386] mute csv test for scoring in esql for mixed cluster (#117767) --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 96631d15f374f..f5f6b84ab8639 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -222,6 +222,9 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.single_node.EsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 +- class: "org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT" + method: "test {scoring.*}" + issue: https://github.com/elastic/elasticsearch/issues/117641 - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testSupportedStream issue: https://github.com/elastic/elasticsearch/issues/117745 From 5f045c05811ffd30f480d08403e3139c9686d97b Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Fri, 29 Nov 2024 16:20:39 +0100 Subject: [PATCH 325/386] One Categorize BlockHash (#117723) * Move all categorize blockhash code to one "CategorizeBlockHash". 
* close resources in case of failure --- .../AbstractCategorizeBlockHash.java | 132 -------- .../aggregation/blockhash/BlockHash.java | 4 +- .../blockhash/CategorizeBlockHash.java | 309 ++++++++++++++++++ .../blockhash/CategorizeRawBlockHash.java | 147 --------- .../CategorizedIntermediateBlockHash.java | 92 ------ .../blockhash/CategorizeBlockHashTests.java | 8 +- .../function/grouping/Categorize.java | 6 +- 7 files changed, 315 insertions(+), 383 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java deleted file mode 100644 index 0e89d77820883..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/AbstractCategorizeBlockHash.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.aggregation.blockhash; - -import org.apache.lucene.util.BytesRefBuilder; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.BitArray; -import org.elasticsearch.common.util.BytesRefHash; -import org.elasticsearch.compute.aggregation.SeenGroupIds; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.core.ReleasableIterator; -import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationBytesRefHash; -import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationPartOfSpeechDictionary; -import org.elasticsearch.xpack.ml.aggs.categorization.SerializableTokenListCategory; -import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; - -import java.io.IOException; - -/** - * Base BlockHash implementation for {@code Categorize} grouping function. - */ -public abstract class AbstractCategorizeBlockHash extends BlockHash { - protected static final int NULL_ORD = 0; - - // TODO: this should probably also take an emitBatchSize - private final int channel; - private final boolean outputPartial; - protected final TokenListCategorizer.CloseableTokenListCategorizer categorizer; - - /** - * Store whether we've seen any {@code null} values. - *
<p>
    - * Null gets the {@link #NULL_ORD} ord. - * </p>
    - */ - protected boolean seenNull = false; - - AbstractCategorizeBlockHash(BlockFactory blockFactory, int channel, boolean outputPartial) { - super(blockFactory); - this.channel = channel; - this.outputPartial = outputPartial; - this.categorizer = new TokenListCategorizer.CloseableTokenListCategorizer( - new CategorizationBytesRefHash(new BytesRefHash(2048, blockFactory.bigArrays())), - CategorizationPartOfSpeechDictionary.getInstance(), - 0.70f - ); - } - - protected int channel() { - return channel; - } - - @Override - public Block[] getKeys() { - return new Block[] { outputPartial ? buildIntermediateBlock() : buildFinalBlock() }; - } - - @Override - public IntVector nonEmpty() { - return IntVector.range(seenNull ? 0 : 1, categorizer.getCategoryCount() + 1, blockFactory); - } - - @Override - public BitArray seenGroupIds(BigArrays bigArrays) { - return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(categorizer.getCategoryCount() + 1)).seenGroupIds(bigArrays); - } - - @Override - public final ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { - throw new UnsupportedOperationException(); - } - - /** - * Serializes the intermediate state into a single BytesRef block, or an empty Null block if there are no categories. - */ - private Block buildIntermediateBlock() { - if (categorizer.getCategoryCount() == 0) { - return blockFactory.newConstantNullBlock(seenNull ? 1 : 0); - } - try (BytesStreamOutput out = new BytesStreamOutput()) { - // TODO be more careful here. - out.writeBoolean(seenNull); - out.writeVInt(categorizer.getCategoryCount()); - for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { - category.writeTo(out); - } - // We're returning a block with N positions just because the Page must have all blocks with the same position count! - int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); - return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), positionCount); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private Block buildFinalBlock() { - BytesRefBuilder scratch = new BytesRefBuilder(); - - if (seenNull) { - try (BytesRefBlock.Builder result = blockFactory.newBytesRefBlockBuilder(categorizer.getCategoryCount())) { - result.appendNull(); - for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { - scratch.copyChars(category.getRegex()); - result.appendBytesRef(scratch.get()); - scratch.clear(); - } - return result.build(); - } - } - - try (BytesRefVector.Builder result = blockFactory.newBytesRefVectorBuilder(categorizer.getCategoryCount())) { - for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { - scratch.copyChars(category.getRegex()); - result.appendBytesRef(scratch.get()); - scratch.clear(); - } - return result.build().asBlock(); - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index ea76c3bd0a0aa..30afa7ae3128d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -180,9 +180,7 @@ public static BlockHash buildCategorizeBlockHash( throw new IllegalArgumentException("only a single CATEGORIZE group can used"); } - return aggregatorMode.isInputPartial() - ? 
new CategorizedIntermediateBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial()) - : new CategorizeRawBlockHash(groups.get(0).channel, blockFactory, aggregatorMode.isOutputPartial(), analysisRegistry); + return new CategorizeBlockHash(blockFactory, groups.get(0).channel, aggregatorMode, analysisRegistry); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java new file mode 100644 index 0000000000000..35c6faf84e623 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java @@ -0,0 +1,309 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; +import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationBytesRefHash; +import org.elasticsearch.xpack.ml.aggs.categorization.CategorizationPartOfSpeechDictionary; +import org.elasticsearch.xpack.ml.aggs.categorization.SerializableTokenListCategory; +import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Base BlockHash implementation for {@code Categorize} grouping function. 
+ */ +public class CategorizeBlockHash extends BlockHash { + + private static final CategorizationAnalyzerConfig ANALYZER_CONFIG = CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer( + List.of() + ); + private static final int NULL_ORD = 0; + + // TODO: this should probably also take an emitBatchSize + private final int channel; + private final AggregatorMode aggregatorMode; + private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; + + private final CategorizeEvaluator evaluator; + + /** + * Store whether we've seen any {@code null} values. + *
<p>
    + * Null gets the {@link #NULL_ORD} ord. + * </p>
    + */ + private boolean seenNull = false; + + CategorizeBlockHash(BlockFactory blockFactory, int channel, AggregatorMode aggregatorMode, AnalysisRegistry analysisRegistry) { + super(blockFactory); + + this.channel = channel; + this.aggregatorMode = aggregatorMode; + + this.categorizer = new TokenListCategorizer.CloseableTokenListCategorizer( + new CategorizationBytesRefHash(new BytesRefHash(2048, blockFactory.bigArrays())), + CategorizationPartOfSpeechDictionary.getInstance(), + 0.70f + ); + + if (aggregatorMode.isInputPartial() == false) { + CategorizationAnalyzer analyzer; + try { + Objects.requireNonNull(analysisRegistry); + analyzer = new CategorizationAnalyzer(analysisRegistry, ANALYZER_CONFIG); + } catch (Exception e) { + categorizer.close(); + throw new RuntimeException(e); + } + this.evaluator = new CategorizeEvaluator(analyzer); + } else { + this.evaluator = null; + } + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + if (aggregatorMode.isInputPartial() == false) { + addInitial(page, addInput); + } else { + addIntermediate(page, addInput); + } + } + + @Override + public Block[] getKeys() { + return new Block[] { aggregatorMode.isOutputPartial() ? buildIntermediateBlock() : buildFinalBlock() }; + } + + @Override + public IntVector nonEmpty() { + return IntVector.range(seenNull ? 0 : 1, categorizer.getCategoryCount() + 1, blockFactory); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(categorizer.getCategoryCount() + 1)).seenGroupIds(bigArrays); + } + + @Override + public final ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + Releasables.close(evaluator, categorizer); + } + + /** + * Adds initial (raw) input to the state. + */ + private void addInitial(Page page, GroupingAggregatorFunction.AddInput addInput) { + try (IntBlock result = (IntBlock) evaluator.eval(page.getBlock(channel))) { + addInput.add(0, result); + } + } + + /** + * Adds intermediate state to the state. + */ + private void addIntermediate(Page page, GroupingAggregatorFunction.AddInput addInput) { + if (page.getPositionCount() == 0) { + return; + } + BytesRefBlock categorizerState = page.getBlock(channel); + if (categorizerState.areAllValuesNull()) { + seenNull = true; + try (var newIds = blockFactory.newConstantIntVector(NULL_ORD, 1)) { + addInput.add(0, newIds); + } + return; + } + + Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); + try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { + int fromId = idMap.containsKey(0) ? 0 : 1; + int toId = fromId + idMap.size(); + for (int i = fromId; i < toId; i++) { + newIdsBuilder.appendInt(idMap.get(i)); + } + try (IntBlock newIds = newIdsBuilder.build()) { + addInput.add(0, newIds); + } + } + } + + /** + * Read intermediate state from a block. + * + * @return a map from the old category id to the new one. The old ids go from 0 to {@code size - 1}. 
+ */ + private Map readIntermediate(BytesRef bytes) { + Map idMap = new HashMap<>(); + try (StreamInput in = new BytesArray(bytes).streamInput()) { + if (in.readBoolean()) { + seenNull = true; + idMap.put(NULL_ORD, NULL_ORD); + } + int count = in.readVInt(); + for (int oldCategoryId = 0; oldCategoryId < count; oldCategoryId++) { + int newCategoryId = categorizer.mergeWireCategory(new SerializableTokenListCategory(in)).getId(); + // +1 because the 0 ordinal is reserved for null + idMap.put(oldCategoryId + 1, newCategoryId + 1); + } + return idMap; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Serializes the intermediate state into a single BytesRef block, or an empty Null block if there are no categories. + */ + private Block buildIntermediateBlock() { + if (categorizer.getCategoryCount() == 0) { + return blockFactory.newConstantNullBlock(seenNull ? 1 : 0); + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeBoolean(seenNull); + out.writeVInt(categorizer.getCategoryCount()); + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + category.writeTo(out); + } + // We're returning a block with N positions just because the Page must have all blocks with the same position count! + int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); + return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), positionCount); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private Block buildFinalBlock() { + BytesRefBuilder scratch = new BytesRefBuilder(); + + if (seenNull) { + try (BytesRefBlock.Builder result = blockFactory.newBytesRefBlockBuilder(categorizer.getCategoryCount())) { + result.appendNull(); + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + scratch.copyChars(category.getRegex()); + result.appendBytesRef(scratch.get()); + scratch.clear(); + } + return result.build(); + } + } + + try (BytesRefVector.Builder result = blockFactory.newBytesRefVectorBuilder(categorizer.getCategoryCount())) { + for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { + scratch.copyChars(category.getRegex()); + result.appendBytesRef(scratch.get()); + scratch.clear(); + } + return result.build().asBlock(); + } + } + + /** + * Similar implementation to an Evaluator. 
+ */ + private final class CategorizeEvaluator implements Releasable { + private final CategorizationAnalyzer analyzer; + + CategorizeEvaluator(CategorizationAnalyzer analyzer) { + this.analyzer = analyzer; + } + + Block eval(BytesRefBlock vBlock) { + BytesRefVector vVector = vBlock.asVector(); + if (vVector == null) { + return eval(vBlock.getPositionCount(), vBlock); + } + IntVector vector = eval(vBlock.getPositionCount(), vVector); + return vector.asBlock(); + } + + IntBlock eval(int positionCount, BytesRefBlock vBlock) { + try (IntBlock.Builder result = blockFactory.newIntBlockBuilder(positionCount)) { + BytesRef vScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + if (vBlock.isNull(p)) { + seenNull = true; + result.appendInt(NULL_ORD); + continue; + } + int first = vBlock.getFirstValueIndex(p); + int count = vBlock.getValueCount(p); + if (count == 1) { + result.appendInt(process(vBlock.getBytesRef(first, vScratch))); + continue; + } + int end = first + count; + result.beginPositionEntry(); + for (int i = first; i < end; i++) { + result.appendInt(process(vBlock.getBytesRef(i, vScratch))); + } + result.endPositionEntry(); + } + return result.build(); + } + } + + IntVector eval(int positionCount, BytesRefVector vVector) { + try (IntVector.FixedBuilder result = blockFactory.newIntVectorFixedBuilder(positionCount)) { + BytesRef vScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + result.appendInt(p, process(vVector.getBytesRef(p, vScratch))); + } + return result.build(); + } + } + + int process(BytesRef v) { + var category = categorizer.computeCategory(v.utf8ToString(), analyzer); + if (category == null) { + seenNull = true; + return NULL_ORD; + } + return category.getId() + 1; + } + + @Override + public void close() { + analyzer.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java deleted file mode 100644 index 47dd7f650dffa..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeRawBlockHash.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.aggregation.blockhash; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; -import org.elasticsearch.xpack.ml.aggs.categorization.TokenListCategorizer; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; - -import java.io.IOException; -import java.util.List; - -/** - * BlockHash implementation for {@code Categorize} grouping function. - *
<p>
    - * This implementation expects rows, and can't deserialize intermediate states coming from other nodes. - * </p>
    - */ -public class CategorizeRawBlockHash extends AbstractCategorizeBlockHash { - private static final CategorizationAnalyzerConfig ANALYZER_CONFIG = CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer( - List.of() - ); - - private final CategorizeEvaluator evaluator; - - CategorizeRawBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial, AnalysisRegistry analysisRegistry) { - super(blockFactory, channel, outputPartial); - - CategorizationAnalyzer analyzer; - try { - analyzer = new CategorizationAnalyzer(analysisRegistry, ANALYZER_CONFIG); - } catch (IOException e) { - categorizer.close(); - throw new RuntimeException(e); - } - - this.evaluator = new CategorizeEvaluator(analyzer, categorizer, blockFactory); - } - - @Override - public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { - try (IntBlock result = (IntBlock) evaluator.eval(page.getBlock(channel()))) { - addInput.add(0, result); - } - } - - @Override - public void close() { - evaluator.close(); - } - - /** - * Similar implementation to an Evaluator. - */ - public final class CategorizeEvaluator implements Releasable { - private final CategorizationAnalyzer analyzer; - - private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; - - private final BlockFactory blockFactory; - - public CategorizeEvaluator( - CategorizationAnalyzer analyzer, - TokenListCategorizer.CloseableTokenListCategorizer categorizer, - BlockFactory blockFactory - ) { - this.analyzer = analyzer; - this.categorizer = categorizer; - this.blockFactory = blockFactory; - } - - public Block eval(BytesRefBlock vBlock) { - BytesRefVector vVector = vBlock.asVector(); - if (vVector == null) { - return eval(vBlock.getPositionCount(), vBlock); - } - IntVector vector = eval(vBlock.getPositionCount(), vVector); - return vector.asBlock(); - } - - public IntBlock eval(int positionCount, BytesRefBlock vBlock) { - try (IntBlock.Builder result = blockFactory.newIntBlockBuilder(positionCount)) { - BytesRef vScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p)) { - seenNull = true; - result.appendInt(NULL_ORD); - continue; - } - int first = vBlock.getFirstValueIndex(p); - int count = vBlock.getValueCount(p); - if (count == 1) { - result.appendInt(process(vBlock.getBytesRef(first, vScratch))); - continue; - } - int end = first + count; - result.beginPositionEntry(); - for (int i = first; i < end; i++) { - result.appendInt(process(vBlock.getBytesRef(i, vScratch))); - } - result.endPositionEntry(); - } - return result.build(); - } - } - - public IntVector eval(int positionCount, BytesRefVector vVector) { - try (IntVector.FixedBuilder result = blockFactory.newIntVectorFixedBuilder(positionCount)) { - BytesRef vScratch = new BytesRef(); - for (int p = 0; p < positionCount; p++) { - result.appendInt(p, process(vVector.getBytesRef(p, vScratch))); - } - return result.build(); - } - } - - private int process(BytesRef v) { - var category = categorizer.computeCategory(v.utf8ToString(), analyzer); - if (category == null) { - seenNull = true; - return NULL_ORD; - } - return category.getId() + 1; - } - - @Override - public void close() { - Releasables.closeExpectNoException(analyzer, categorizer); - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java deleted file 
mode 100644 index c774d3b26049d..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizedIntermediateBlockHash.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.aggregation.blockhash; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; -import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.xpack.ml.aggs.categorization.SerializableTokenListCategory; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -/** - * BlockHash implementation for {@code Categorize} grouping function. - *
<p>
    - * This implementation expects a single intermediate state in a block, as generated by {@link AbstractCategorizeBlockHash}. - * </p>
    - */ -public class CategorizedIntermediateBlockHash extends AbstractCategorizeBlockHash { - - CategorizedIntermediateBlockHash(int channel, BlockFactory blockFactory, boolean outputPartial) { - super(blockFactory, channel, outputPartial); - } - - @Override - public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { - if (page.getPositionCount() == 0) { - // No categories - return; - } - BytesRefBlock categorizerState = page.getBlock(channel()); - if (categorizerState.areAllValuesNull()) { - seenNull = true; - try (var newIds = blockFactory.newConstantIntVector(NULL_ORD, 1)) { - addInput.add(0, newIds); - } - return; - } - - Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); - try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { - int fromId = idMap.containsKey(0) ? 0 : 1; - int toId = fromId + idMap.size(); - for (int i = fromId; i < toId; i++) { - newIdsBuilder.appendInt(idMap.get(i)); - } - try (IntBlock newIds = newIdsBuilder.build()) { - addInput.add(0, newIds); - } - } - } - - /** - * Read intermediate state from a block. - * - * @return a map from the old category id to the new one. The old ids go from 0 to {@code size - 1}. - */ - private Map readIntermediate(BytesRef bytes) { - Map idMap = new HashMap<>(); - try (StreamInput in = new BytesArray(bytes).streamInput()) { - if (in.readBoolean()) { - seenNull = true; - idMap.put(NULL_ORD, NULL_ORD); - } - int count = in.readVInt(); - for (int oldCategoryId = 0; oldCategoryId < count; oldCategoryId++) { - int newCategoryId = categorizer.mergeWireCategory(new SerializableTokenListCategory(in)).getId(); - // +1 because the 0 ordinal is reserved for null - idMap.put(oldCategoryId + 1, newCategoryId + 1); - } - return idMap; - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - @Override - public void close() { - categorizer.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index 8a3c723557151..3c47e85a4a9c8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -95,7 +95,7 @@ public void testCategorizeRaw() { page = new Page(builder.build()); } - try (BlockHash hash = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry)) { + try (BlockHash hash = new CategorizeBlockHash(blockFactory, 0, AggregatorMode.INITIAL, analysisRegistry)) { hash.add(page, new GroupingAggregatorFunction.AddInput() { @Override public void add(int positionOffset, IntBlock groupIds) { @@ -168,8 +168,8 @@ public void testCategorizeIntermediate() { // Fill intermediatePages with the intermediate state from the raw hashes try ( - BlockHash rawHash1 = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry); - BlockHash rawHash2 = new CategorizeRawBlockHash(0, blockFactory, true, analysisRegistry); + BlockHash rawHash1 = new CategorizeBlockHash(blockFactory, 0, AggregatorMode.INITIAL, analysisRegistry); + BlockHash rawHash2 = new CategorizeBlockHash(blockFactory, 0, AggregatorMode.INITIAL, analysisRegistry); ) { rawHash1.add(page1, new GroupingAggregatorFunction.AddInput() { @Override @@ -226,7 +226,7 @@ public void close() { 
page2.releaseBlocks(); } - try (BlockHash intermediateHash = new CategorizedIntermediateBlockHash(0, blockFactory, true)) { + try (BlockHash intermediateHash = new CategorizeBlockHash(blockFactory, 0, AggregatorMode.INTERMEDIATE, null)) { intermediateHash.add(intermediatePage1, new GroupingAggregatorFunction.AddInput() { @Override public void add(int positionOffset, IntBlock groupIds) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 31b603ecef889..63b5073c2217a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -32,12 +32,8 @@ * This function has no evaluators, as it works like an aggregation (Accumulates values, stores intermediate states, etc). *
</p> * <p>
    - * For the implementation, see: + * For the implementation, see {@link org.elasticsearch.compute.aggregation.blockhash.CategorizeBlockHash} * </p>
    - * <ul> - *     <li>{@link org.elasticsearch.compute.aggregation.blockhash.CategorizedIntermediateBlockHash}</li> - *     <li>{@link org.elasticsearch.compute.aggregation.blockhash.CategorizeRawBlockHash}</li> - * </ul>
    */ public class Categorize extends GroupingFunction implements Validatable { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( From 64107e0a0b032c0ee1ed319f0d6bfefce23def9a Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 29 Nov 2024 17:34:05 +0100 Subject: [PATCH 326/386] Compute output of LookupJoinExec dynamically (#117763) LookupJoinExec should not assume its output but instead compute it from - Its input fields from the left - The fields added from the lookup index Currently, LookupJoinExec's output is determined when the logical plan is mapped to a physical one, and thereafter the output cannot be changed anymore. This makes it impossible to have late materialization of fields from the left hand side via field extractions, because we are forced to extract *all* fields before the LookupJoinExec, otherwise we do not achieve the prescribed output. Avoid that by tracking only which fields the LookupJoinExec will add from the lookup index instead of tracking the whole output (that was only correct for the logical plan). **Note:** While this PR is a refactoring for the current functionality, it should unblock @craigtaverner 's ongoing work related to field extractions and getting multiple LOOKUP JOIN queries to work correctly without adding hacks. --- .../xpack/esql/ccq/MultiClusterSpecIT.java | 4 +- .../src/main/resources/lookup-join.csv-spec | 10 +-- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../optimizer/PhysicalOptimizerRules.java | 32 --------- .../physical/local/InsertFieldExtraction.java | 4 +- .../xpack/esql/plan/logical/join/Join.java | 31 +++++---- .../esql/plan/physical/LookupJoinExec.java | 65 ++++++++----------- .../esql/planner/LocalExecutionPlanner.java | 6 +- .../esql/planner/mapper/LocalMapper.java | 10 +-- .../xpack/esql/planner/mapper/Mapper.java | 10 +-- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- 11 files changed, 60 insertions(+), 116 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 8f4522573f880..af5eadc7358a2 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -47,7 +47,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; -import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V3; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -125,7 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", 
testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V2.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V3.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 11786fb905c60..5de353978b307 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -5,7 +5,7 @@ //TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) basicOnTheDataNode-Ignore -required_capability: join_lookup_v2 +required_capability: join_lookup_v3 FROM employees | EVAL language_code = languages @@ -22,7 +22,7 @@ emp_no:integer | language_code:integer | language_name:keyword ; basicRow-Ignore -required_capability: join_lookup +required_capability: join_lookup_v3 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code @@ -33,7 +33,7 @@ language_code:keyword | language_name:keyword ; basicOnTheCoordinator -required_capability: join_lookup_v2 +required_capability: join_lookup_v3 FROM employees | SORT emp_no @@ -51,7 +51,7 @@ emp_no:integer | language_code:integer | language_name:keyword //TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) subsequentEvalOnTheDataNode-Ignore -required_capability: join_lookup_v2 +required_capability: join_lookup_v3 FROM employees | EVAL language_code = languages @@ -69,7 +69,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v2 +required_capability: join_lookup_v3 FROM employees | SORT emp_no diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 373be23cdf847..dc3329a906741 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -521,7 +521,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V2(Build.current().isSnapshot()), + JOIN_LOOKUP_V3(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java index 482a89b50c865..ee192c2420da8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.optimizer; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; import 
org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.rule.ParameterizedRule; @@ -62,34 +60,4 @@ public final PhysicalPlan apply(PhysicalPlan plan) { protected abstract PhysicalPlan rule(SubPlan plan); } - - public abstract static class OptimizerExpressionRule extends Rule { - - private final TransformDirection direction; - // overriding type token which returns the correct class but does an uncheck cast to LogicalPlan due to its generic bound - // a proper solution is to wrap the Expression rule into a Plan rule but that would affect the rule declaration - // so instead this is hacked here - private final Class expressionTypeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); - - public OptimizerExpressionRule(TransformDirection direction) { - this.direction = direction; - } - - @Override - public final PhysicalPlan apply(PhysicalPlan plan) { - return direction == TransformDirection.DOWN - ? plan.transformExpressionsDown(expressionTypeToken, this::rule) - : plan.transformExpressionsUp(expressionTypeToken, this::rule); - } - - protected PhysicalPlan rule(PhysicalPlan plan) { - return plan; - } - - protected abstract Expression rule(E e); - - public Class expressionToken() { - return expressionTypeToken; - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index 72573821dfeb8..cafe3726f92ac 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -104,15 +104,15 @@ private static Set missingAttributes(PhysicalPlan p) { var missing = new LinkedHashSet(); var inputSet = p.inputSet(); - // FIXME: the extractors should work on the right side as well + // TODO: We need to extract whatever fields are missing from the left hand side. 
// skip the lookup join since the right side is always materialized and a projection if (p instanceof LookupJoinExec join) { - // collect fields used in the join condition return Collections.emptySet(); } var input = inputSet; // collect field attributes used inside expressions + // TODO: Rather than going over all expressions manually, this should just call .references() p.forEachExpression(TypedAttribute.class, f -> { if (f instanceof FieldAttribute || f instanceof MetadataAttribute) { if (input.contains(f) == false) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index dd6b3ea3455f7..6af29fb23b3bb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -23,12 +23,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; -import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.RIGHT; public class Join extends BinaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new); @@ -100,6 +97,19 @@ public List output() { return lazyOutput; } + public List rightOutputFields() { + AttributeSet leftInputs = left().outputSet(); + + List rightOutputFields = new ArrayList<>(); + for (Attribute attr : output()) { + if (leftInputs.contains(attr) == false) { + rightOutputFields.add(attr); + } + } + + return rightOutputFields; + } + /** * Combine the two lists of attributes into one. * In case of (name) conflicts, specify which sides wins, that is overrides the other column - the left or the right. 
@@ -108,18 +118,11 @@ public static List computeOutput(List leftOutput, List output; // TODO: make the other side nullable - Set matchFieldNames = config.matchFields().stream().map(NamedExpression::name).collect(Collectors.toSet()); if (LEFT.equals(joinType)) { - // right side becomes nullable and overrides left except for match fields, which we preserve from the left - List rightOutputWithoutMatchFields = rightOutput.stream() - .filter(attr -> matchFieldNames.contains(attr.name()) == false) - .toList(); + // right side becomes nullable and overrides left except for join keys, which we preserve from the left + AttributeSet rightKeys = new AttributeSet(config.rightFields()); + List rightOutputWithoutMatchFields = rightOutput.stream().filter(attr -> rightKeys.contains(attr) == false).toList(); output = mergeOutputAttributes(rightOutputWithoutMatchFields, leftOutput); - } else if (RIGHT.equals(joinType)) { - List leftOutputWithoutMatchFields = leftOutput.stream() - .filter(attr -> matchFieldNames.contains(attr.name()) == false) - .toList(); - output = mergeOutputAttributes(leftOutputWithoutMatchFields, rightOutput); } else { throw new IllegalArgumentException(joinType.joinName() + " unsupported"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java index e01451ceaecac..2d3caa27da4cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Comparator; import java.util.List; import java.util.Objects; @@ -30,43 +29,43 @@ public class LookupJoinExec extends BinaryExec implements EstimatesRowSize { LookupJoinExec::new ); - private final List matchFields; private final List leftFields; private final List rightFields; - private final List output; - private List lazyAddedFields; + /** + * These cannot be computed from the left + right outputs, because + * {@link org.elasticsearch.xpack.esql.optimizer.rules.physical.local.ReplaceSourceAttributes} will replace the {@link EsSourceExec} on + * the right hand side by a {@link EsQueryExec}, and thus lose the information of which fields we'll get from the lookup index. 
+ */ + private final List addedFields; + private List lazyOutput; public LookupJoinExec( Source source, PhysicalPlan left, PhysicalPlan lookup, - List matchFields, List leftFields, List rightFields, - List output + List addedFields ) { super(source, left, lookup); - this.matchFields = matchFields; this.leftFields = leftFields; this.rightFields = rightFields; - this.output = output; + this.addedFields = addedFields; } private LookupJoinExec(StreamInput in) throws IOException { super(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readNamedWriteable(PhysicalPlan.class)); - this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class); this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class); - this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + this.addedFields = in.readNamedWriteableCollectionAsList(Attribute.class); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeNamedWriteableCollection(matchFields); out.writeNamedWriteableCollection(leftFields); out.writeNamedWriteableCollection(rightFields); - out.writeNamedWriteableCollection(output); + out.writeNamedWriteableCollection(addedFields); } @Override @@ -78,10 +77,6 @@ public PhysicalPlan lookup() { return right(); } - public List matchFields() { - return matchFields; - } - public List leftFields() { return leftFields; } @@ -91,29 +86,26 @@ public List rightFields() { } public List addedFields() { - if (lazyAddedFields == null) { - AttributeSet set = outputSet(); - set.removeAll(left().output()); - for (Attribute m : matchFields) { - set.removeIf(a -> a.name().equals(m.name())); + return addedFields; + } + + @Override + public List output() { + if (lazyOutput == null) { + lazyOutput = new ArrayList<>(left().output()); + for (Attribute attr : addedFields) { + lazyOutput.add(attr); } - lazyAddedFields = new ArrayList<>(set); - lazyAddedFields.sort(Comparator.comparing(Attribute::name)); } - return lazyAddedFields; + return lazyOutput; } @Override public PhysicalPlan estimateRowSize(State state) { - state.add(false, output); + state.add(false, output()); return this; } - @Override - public List output() { - return output; - } - @Override public AttributeSet inputSet() { // TODO: this is a hack since the right side is always materialized - instead this should @@ -129,12 +121,12 @@ protected AttributeSet computeReferences() { @Override public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { - return new LookupJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); + return new LookupJoinExec(source(), left, right, leftFields, rightFields, addedFields); } @Override protected NodeInfo info() { - return NodeInfo.create(this, LookupJoinExec::new, left(), right(), matchFields, leftFields, rightFields, output); + return NodeInfo.create(this, LookupJoinExec::new, left(), right(), leftFields, rightFields, addedFields); } @Override @@ -148,15 +140,12 @@ public boolean equals(Object o) { if (super.equals(o) == false) { return false; } - LookupJoinExec hash = (LookupJoinExec) o; - return matchFields.equals(hash.matchFields) - && leftFields.equals(hash.leftFields) - && rightFields.equals(hash.rightFields) - && output.equals(hash.output); + LookupJoinExec other = (LookupJoinExec) o; + return leftFields.equals(other.leftFields) && rightFields.equals(other.rightFields) && 
addedFields.equals(other.addedFields); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), matchFields, leftFields, rightFields, output); + return Objects.hash(super.hashCode(), leftFields, rightFields, addedFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 1ffc652e54337..a8afaa4d8119b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -583,8 +583,8 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan if (localSourceExec.indexMode() != IndexMode.LOOKUP) { throw new IllegalArgumentException("can't plan [" + join + "]"); } - List matchFields = new ArrayList<>(join.matchFields().size()); - for (Attribute m : join.matchFields()) { + List matchFields = new ArrayList<>(join.leftFields().size()); + for (Attribute m : join.leftFields()) { Layout.ChannelAndType t = source.layout.get(m.id()); if (t == null) { throw new IllegalArgumentException("can't plan [" + join + "][" + m + "]"); @@ -604,7 +604,7 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan lookupFromIndexService, matchFields.getFirst().type(), localSourceExec.index().name(), - join.matchFields().getFirst().name(), + join.leftFields().getFirst().name(), join.addedFields().stream().map(f -> (NamedExpression) f).toList(), join.source() ), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index fc52f2d5a9d23..f95ae0e0783e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -120,15 +120,7 @@ private PhysicalPlan mapBinary(BinaryPlan binary) { ); } if (right instanceof EsSourceExec source && source.indexMode() == IndexMode.LOOKUP) { - return new LookupJoinExec( - join.source(), - left, - right, - config.matchFields(), - config.leftFields(), - config.rightFields(), - join.output() - ); + return new LookupJoinExec(join.source(), left, right, config.leftFields(), config.rightFields(), join.rightOutputFields()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java index 23e6f4fb91d18..8a4325ed84b2a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java @@ -207,15 +207,7 @@ private PhysicalPlan mapBinary(BinaryPlan bp) { if (right instanceof FragmentExec fragment && fragment.fragment() instanceof EsRelation relation && relation.indexMode() == IndexMode.LOOKUP) { - return new LookupJoinExec( - join.source(), - left, - right, - config.matchFields(), - config.leftFields(), - config.rightFields(), - join.output() - ); + return new LookupJoinExec(join.source(), left, right, config.leftFields(), config.rightFields(), join.rightOutputFields()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 6763988eac638..df974a88a4c57 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public final void test() throws Throwable { ); assumeFalse( "lookup join disabled for csv tests", - testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V2.capabilityName()) + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V3.capabilityName()) ); if (Build.current().isSnapshot()) { assertThat( From 39481e912f10f9ce4ca85176b1bee9a9b97c43f6 Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Fri, 29 Nov 2024 09:40:31 -0800 Subject: [PATCH 327/386] trash derived buffers (#117744) --- .../transport/netty4/NettyAllocator.java | 43 -- .../transport/netty4/TrashingByteBuf.java | 536 ++++++++++++++++++ .../transport/netty4/NettyAllocatorTests.java | 1 - 3 files changed, 536 insertions(+), 44 deletions(-) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/TrashingByteBuf.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index 1eb7e13889338..e8bd5514947d6 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -362,49 +362,6 @@ public ByteBufAllocator getDelegate() { } } - static class TrashingByteBuf extends WrappedByteBuf { - - private boolean trashed = false; - - protected TrashingByteBuf(ByteBuf buf) { - super(buf); - } - - @Override - public boolean release() { - if (refCnt() == 1) { - // see [NOTE on racy trashContent() calls] - trashContent(); - } - return super.release(); - } - - @Override - public boolean release(int decrement) { - if (refCnt() == decrement && refCnt() > 0) { - // see [NOTE on racy trashContent() calls] - trashContent(); - } - return super.release(decrement); - } - - // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref - // count to zero, which looks racy because in principle a concurrent caller could come along - // and successfully retain() this buffer to keep it alive after it's been trashed. Such a - // caller would sometimes get an IllegalReferenceCountException ofc but that's something it - // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. - // Yet in practice this should never happen, we only ever retain() these buffers while we - // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd - // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose - // contents we've already decided to trash. 
- private void trashContent() { - if (trashed == false) { - trashed = true; - TrashingByteBufAllocator.trashBuffer(buf); - } - } - } - static class TrashingCompositeByteBuf extends CompositeByteBuf { TrashingCompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/TrashingByteBuf.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/TrashingByteBuf.java new file mode 100644 index 0000000000000..ead0d595f0105 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/TrashingByteBuf.java @@ -0,0 +1,536 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +class TrashingByteBuf extends WrappedByteBuf { + + private boolean trashed = false; + + protected TrashingByteBuf(ByteBuf buf) { + super(buf); + } + + static TrashingByteBuf newBuf(ByteBuf buf) { + return new TrashingByteBuf(buf); + } + + @Override + public boolean release() { + if (refCnt() == 1) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(); + } + + @Override + public boolean release(int decrement) { + if (refCnt() == decrement && refCnt() > 0) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(decrement); + } + + // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref + // count to zero, which looks racy because in principle a concurrent caller could come along + // and successfully retain() this buffer to keep it alive after it's been trashed. Such a + // caller would sometimes get an IllegalReferenceCountException ofc but that's something it + // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. + // Yet in practice this should never happen, we only ever retain() these buffers while we + // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd + // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose + // contents we've already decided to trash. 
+ private void trashContent() { + if (trashed == false) { + trashed = true; + NettyAllocator.TrashingByteBufAllocator.trashBuffer(buf); + } + } + + @Override + public ByteBuf capacity(int newCapacity) { + super.capacity(newCapacity); + return this; + } + + @Override + public ByteBuf order(ByteOrder endianness) { + return newBuf(super.order(endianness)); + } + + @Override + public ByteBuf asReadOnly() { + return newBuf(super.asReadOnly()); + } + + @Override + public ByteBuf setIndex(int readerIndex, int writerIndex) { + super.setIndex(readerIndex, writerIndex); + return this; + } + + @Override + public ByteBuf discardReadBytes() { + super.discardReadBytes(); + return this; + } + + @Override + public ByteBuf discardSomeReadBytes() { + super.discardSomeReadBytes(); + return this; + } + + @Override + public ByteBuf ensureWritable(int minWritableBytes) { + super.ensureWritable(minWritableBytes); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst) { + super.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int length) { + super.getBytes(index, dst, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) { + super.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst) { + super.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) { + super.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuffer dst) { + super.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException { + super.getBytes(index, out, length); + return this; + } + + @Override + public ByteBuf setBoolean(int index, boolean value) { + super.setBoolean(index, value); + return this; + } + + @Override + public ByteBuf setByte(int index, int value) { + super.setByte(index, value); + return this; + } + + @Override + public ByteBuf setShort(int index, int value) { + super.setShort(index, value); + return this; + } + + @Override + public ByteBuf setShortLE(int index, int value) { + super.setShortLE(index, value); + return this; + } + + @Override + public ByteBuf setMedium(int index, int value) { + super.setMedium(index, value); + return this; + } + + @Override + public ByteBuf setMediumLE(int index, int value) { + super.setMediumLE(index, value); + return this; + } + + @Override + public ByteBuf setInt(int index, int value) { + super.setInt(index, value); + return this; + } + + @Override + public ByteBuf setIntLE(int index, int value) { + super.setIntLE(index, value); + return this; + } + + @Override + public ByteBuf setLong(int index, long value) { + super.setLong(index, value); + return this; + } + + @Override + public ByteBuf setLongLE(int index, long value) { + super.setLongLE(index, value); + return this; + } + + @Override + public ByteBuf setChar(int index, int value) { + super.setChar(index, value); + return this; + } + + @Override + public ByteBuf setFloat(int index, float value) { + super.setFloat(index, value); + return this; + } + + @Override + public ByteBuf setDouble(int index, double value) { + super.setDouble(index, value); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src) { + super.setBytes(index, src); + return this; + } + + @Override + public ByteBuf 
setBytes(int index, ByteBuf src, int length) { + super.setBytes(index, src, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { + super.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src) { + super.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) { + super.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuffer src) { + super.setBytes(index, src); + return this; + } + + @Override + public ByteBuf readBytes(int length) { + return newBuf(super.readBytes(length)); + } + + @Override + public ByteBuf readSlice(int length) { + return newBuf(super.readSlice(length)); + } + + @Override + public ByteBuf readRetainedSlice(int length) { + return newBuf(super.readRetainedSlice(length)); + } + + @Override + public ByteBuf readBytes(ByteBuf dst) { + super.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int length) { + super.readBytes(dst, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { + super.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst) { + super.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuffer dst) { + super.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst, int dstIndex, int length) { + super.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(OutputStream out, int length) throws IOException { + super.readBytes(out, length); + return this; + } + + @Override + public ByteBuf skipBytes(int length) { + super.skipBytes(length); + return this; + } + + @Override + public ByteBuf writeBoolean(boolean value) { + super.writeBoolean(value); + return this; + } + + @Override + public ByteBuf writeByte(int value) { + super.writeByte(value); + return this; + } + + @Override + public ByteBuf writeShort(int value) { + super.writeShort(value); + return this; + } + + @Override + public ByteBuf writeShortLE(int value) { + super.writeShortLE(value); + return this; + } + + @Override + public ByteBuf writeMedium(int value) { + super.writeMedium(value); + return this; + } + + @Override + public ByteBuf writeMediumLE(int value) { + super.writeMediumLE(value); + return this; + } + + @Override + public ByteBuf writeInt(int value) { + super.writeInt(value); + return this; + + } + + @Override + public ByteBuf writeIntLE(int value) { + super.writeIntLE(value); + return this; + } + + @Override + public ByteBuf writeLong(long value) { + super.writeLong(value); + return this; + } + + @Override + public ByteBuf writeLongLE(long value) { + super.writeLongLE(value); + return this; + } + + @Override + public ByteBuf writeChar(int value) { + super.writeChar(value); + return this; + } + + @Override + public ByteBuf writeFloat(float value) { + super.writeFloat(value); + return this; + } + + @Override + public ByteBuf writeDouble(double value) { + super.writeDouble(value); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src) { + super.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int length) { + super.writeBytes(src, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int srcIndex, int 
length) { + super.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src) { + super.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { + super.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuffer src) { + super.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeZero(int length) { + super.writeZero(length); + return this; + } + + @Override + public ByteBuf copy() { + return newBuf(super.copy()); + } + + @Override + public ByteBuf copy(int index, int length) { + return newBuf(super.copy(index, length)); + } + + @Override + public ByteBuf slice() { + return newBuf(super.slice()); + } + + @Override + public ByteBuf retainedSlice() { + return newBuf(super.retainedSlice()); + } + + @Override + public ByteBuf slice(int index, int length) { + return newBuf(super.slice(index, length)); + } + + @Override + public ByteBuf retainedSlice(int index, int length) { + return newBuf(super.retainedSlice(index, length)); + } + + @Override + public ByteBuf duplicate() { + return newBuf(super.duplicate()); + } + + @Override + public ByteBuf retainedDuplicate() { + return newBuf(super.retainedDuplicate()); + } + + @Override + public ByteBuf retain(int increment) { + super.retain(increment); + return this; + } + + @Override + public ByteBuf touch(Object hint) { + super.touch(hint); + return this; + } + + @Override + public ByteBuf retain() { + super.retain(); + return this; + } + + @Override + public ByteBuf touch() { + super.touch(); + return this; + } + + @Override + public ByteBuf setFloatLE(int index, float value) { + return super.setFloatLE(index, value); + } + + @Override + public ByteBuf setDoubleLE(int index, double value) { + super.setDoubleLE(index, value); + return this; + } + + @Override + public ByteBuf writeFloatLE(float value) { + super.writeFloatLE(value); + return this; + } + + @Override + public ByteBuf writeDoubleLE(double value) { + super.writeDoubleLE(value); + return this; + } + + @Override + public ByteBuf asByteBuf() { + return this; + } +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java index a76eb9fa4875b..b9e9b667e72fe 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java @@ -20,7 +20,6 @@ import java.nio.ByteBuffer; import java.util.List; -import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBuf; import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBufAllocator; public class NettyAllocatorTests extends ESTestCase { From 0b764adbc19a99ee14d88b96f5f99002fabc19cb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 30 Nov 2024 08:29:46 +1100 Subject: [PATCH 328/386] Mute org.elasticsearch.search.ccs.CrossClusterIT testCancel #108061 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f5f6b84ab8639..b82e95ea26890 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -231,6 +231,9 @@ tests: - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT method: test {scoring.QstrWithFieldAndScoringSortedEval} 
issue: https://github.com/elastic/elasticsearch/issues/117751
+- class: org.elasticsearch.search.ccs.CrossClusterIT
+  method: testCancel
+  issue: https://github.com/elastic/elasticsearch/issues/108061
 
 # Examples:
 #

From c74c06daee0583562c82597b19178268b9f415e5 Mon Sep 17 00:00:00 2001
From: Nick Tindall
Date: Sat, 30 Nov 2024 11:33:20 +1100
Subject: [PATCH 329/386] Deduplicate Range header parsing (#117304)

---
 ...CloudStorageBlobContainerRetriesTests.java |  7 +--
 .../java/fixture/azure/AzureHttpHandler.java  | 23 ++++----
 .../gcs/GoogleCloudStorageHttpHandler.java    | 17 +++---
 test/fixtures/s3-fixture/build.gradle         |  2 +-
 .../main/java/fixture/s3/S3HttpHandler.java   | 20 +++----
 .../src/main/java/fixture/url/URLFixture.java | 16 +++---
 .../AbstractBlobContainerRetriesTestCase.java | 24 ++++-----
 .../test/fixture/HttpHeaderParser.java        | 42 +++++++++++++++
 .../http/HttpHeaderParserTests.java           | 53 +++++++++++++++++++
 9 files changed, 141 insertions(+), 63 deletions(-)
 create mode 100644 test/framework/src/main/java/org/elasticsearch/test/fixture/HttpHeaderParser.java
 create mode 100644 test/framework/src/test/java/org/elasticsearch/http/HttpHeaderParserTests.java

diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
index 110c31b212ea1..a53ec71f66376 100644
--- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
+++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java
@@ -41,6 +41,7 @@
 import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.RestUtils;
+import org.elasticsearch.test.fixture.HttpHeaderParser;
 import org.threeten.bp.Duration;
 
 import java.io.IOException;
@@ -177,9 +178,9 @@ public void testReadLargeBlobWithRetries() throws Exception {
         httpServer.createContext(downloadStorageEndpoint(blobContainer, "large_blob_retries"), exchange -> {
             Streams.readFully(exchange.getRequestBody());
             exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
-            final Tuple<Long, Long> range = getRange(exchange);
-            final int offset = Math.toIntExact(range.v1());
-            final byte[] chunk = Arrays.copyOfRange(bytes, offset, Math.toIntExact(Math.min(range.v2() + 1, bytes.length)));
+            final HttpHeaderParser.Range range = getRange(exchange);
+            final int offset = Math.toIntExact(range.start());
+            final byte[] chunk = Arrays.copyOfRange(bytes, offset, Math.toIntExact(Math.min(range.end() + 1, bytes.length)));
             exchange.sendResponseHeaders(RestStatus.OK.getStatus(), chunk.length);
             if (randomBoolean() && countDown.decrementAndGet() >= 0) {
                 exchange.getResponseBody().write(chunk, 0, chunk.length - 1);
diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
index 904f4581ad2c9..cb7c700376a1a 100644
--- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
+++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.RestUtils;
+import
org.elasticsearch.test.fixture.HttpHeaderParser; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -42,8 +43,6 @@ import java.util.Set; import java.util.UUID; import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import static fixture.azure.MockAzureBlobStore.failTestWithAssertionError; import static org.elasticsearch.repositories.azure.AzureFixtureHelper.assertValidBlockId; @@ -54,7 +53,6 @@ @SuppressForbidden(reason = "Uses a HttpServer to emulate an Azure endpoint") public class AzureHttpHandler implements HttpHandler { private static final Logger logger = LogManager.getLogger(AzureHttpHandler.class); - private static final Pattern RANGE_HEADER_PATTERN = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$"); static final String X_MS_LEASE_ID = "x-ms-lease-id"; static final String X_MS_PROPOSED_LEASE_ID = "x-ms-proposed-lease-id"; static final String X_MS_LEASE_DURATION = "x-ms-lease-duration"; @@ -232,29 +230,26 @@ public void handle(final HttpExchange exchange) throws IOException { final BytesReference responseContent; final RestStatus successStatus; // see Constants.HeaderConstants.STORAGE_RANGE_HEADER - final String range = exchange.getRequestHeaders().getFirst("x-ms-range"); - if (range != null) { - final Matcher matcher = RANGE_HEADER_PATTERN.matcher(range); - if (matcher.matches() == false) { + final String rangeHeader = exchange.getRequestHeaders().getFirst("x-ms-range"); + if (rangeHeader != null) { + final HttpHeaderParser.Range range = HttpHeaderParser.parseRangeHeader(rangeHeader); + if (range == null) { throw new MockAzureBlobStore.BadRequestException( "InvalidHeaderValue", - "Range header does not match expected format: " + range + "Range header does not match expected format: " + rangeHeader ); } - final long start = Long.parseLong(matcher.group(1)); - final long end = Long.parseLong(matcher.group(2)); - final BytesReference blobContents = blob.getContents(); - if (blobContents.length() <= start) { + if (blobContents.length() <= range.start()) { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.sendResponseHeaders(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus(), -1); return; } responseContent = blobContents.slice( - Math.toIntExact(start), - Math.toIntExact(Math.min(end - start + 1, blobContents.length() - start)) + Math.toIntExact(range.start()), + Math.toIntExact(Math.min(range.end() - range.start() + 1, blobContents.length() - range.start())) ); successStatus = RestStatus.PARTIAL_CONTENT; } else { diff --git a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java index 51e3185623360..f6b52a32a9a1d 100644 --- a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java +++ b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.fixture.HttpHeaderParser; import java.io.BufferedReader; import java.io.IOException; @@ -58,8 +59,6 @@ public class GoogleCloudStorageHttpHandler implements HttpHandler { private static final Logger logger = LogManager.getLogger(GoogleCloudStorageHttpHandler.class); - private static final Pattern RANGE_MATCHER = Pattern.compile("bytes=([0-9]*)-([0-9]*)"); - private 
final ConcurrentMap<String, BytesReference> blobs;
     private final String bucket;
 
@@ -131,19 +130,19 @@ public void handle(final HttpExchange exchange) throws IOException {
             // Download Object https://cloud.google.com/storage/docs/request-body
             BytesReference blob = blobs.get(exchange.getRequestURI().getPath().replace("/download/storage/v1/b/" + bucket + "/o/", ""));
             if (blob != null) {
-                final String range = exchange.getRequestHeaders().getFirst("Range");
+                final String rangeHeader = exchange.getRequestHeaders().getFirst("Range");
                 final long offset;
                 final long end;
-                if (range == null) {
+                if (rangeHeader == null) {
                     offset = 0L;
                     end = blob.length() - 1;
                 } else {
-                    Matcher matcher = RANGE_MATCHER.matcher(range);
-                    if (matcher.find() == false) {
-                        throw new AssertionError("Range bytes header does not match expected format: " + range);
+                    final HttpHeaderParser.Range range = HttpHeaderParser.parseRangeHeader(rangeHeader);
+                    if (range == null) {
+                        throw new AssertionError("Range bytes header does not match expected format: " + rangeHeader);
                     }
-                    offset = Long.parseLong(matcher.group(1));
-                    end = Long.parseLong(matcher.group(2));
+                    offset = range.start();
+                    end = range.end();
                 }
 
                 if (offset >= blob.length()) {
diff --git a/test/fixtures/s3-fixture/build.gradle b/test/fixtures/s3-fixture/build.gradle
index d628800497293..e4c35464608a8 100644
--- a/test/fixtures/s3-fixture/build.gradle
+++ b/test/fixtures/s3-fixture/build.gradle
@@ -15,5 +15,5 @@ dependencies {
   api("junit:junit:${versions.junit}") {
     transitive = false
   }
-  testImplementation project(':test:framework')
+  implementation project(':test:framework')
 }
diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java
index 56d3454aa5544..bfc0428731c56 100644
--- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java
+++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java
@@ -28,6 +28,7 @@
 import org.elasticsearch.logging.Logger;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.RestUtils;
+import org.elasticsearch.test.fixture.HttpHeaderParser;
 
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -269,8 +270,8 @@ public void handle(final HttpExchange exchange) throws IOException {
                 exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1);
                 return;
             }
-            final String range = exchange.getRequestHeaders().getFirst("Range");
-            if (range == null) {
+            final String rangeHeader = exchange.getRequestHeaders().getFirst("Range");
+            if (rangeHeader == null) {
                 exchange.getResponseHeaders().add("Content-Type", "application/octet-stream");
                 exchange.sendResponseHeaders(RestStatus.OK.getStatus(), blob.length());
                 blob.writeTo(exchange.getResponseBody());
@@ -281,17 +282,12 @@
             // requests with a header value like "Range: bytes=start-end" where both {@code start} and {@code end} are always defined
             // (sometimes to very high value for {@code end}). It would be too tedious to fully support the RFC so S3HttpHandler only
             // supports when both {@code start} and {@code end} are defined to match the SDK behavior.
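            // As a concrete illustration (added for clarity, not from the original change; the values
            // are made up): the SDK might send "Range: bytes=0-131071" to read the first 128 KiB of a
            // blob, and only this bounded shape is accepted; open-ended forms such as
            // "Range: bytes=131072-" or suffix forms such as "Range: bytes=-500" fall outside the
            // supported subset.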
- final Matcher matcher = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$").matcher(range); - if (matcher.matches() == false) { - throw new AssertionError("Bytes range does not match expected pattern: " + range); - } - var groupStart = matcher.group(1); - var groupEnd = matcher.group(2); - if (groupStart == null || groupEnd == null) { - throw new AssertionError("Bytes range does not match expected pattern: " + range); + final HttpHeaderParser.Range range = HttpHeaderParser.parseRangeHeader(rangeHeader); + if (range == null) { + throw new AssertionError("Bytes range does not match expected pattern: " + rangeHeader); } - long start = Long.parseLong(groupStart); - long end = Long.parseLong(groupEnd); + long start = range.start(); + long end = range.end(); if (end < start) { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), blob.length()); diff --git a/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java b/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java index 4c3159fc3c849..860f6ff141689 100644 --- a/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java +++ b/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java @@ -10,6 +10,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.fixture.AbstractHttpFixture; +import org.elasticsearch.test.fixture.HttpHeaderParser; import org.junit.rules.TemporaryFolder; import org.junit.rules.TestRule; @@ -21,15 +22,12 @@ import java.nio.file.Path; import java.util.HashMap; import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; /** * This {@link URLFixture} exposes a filesystem directory over HTTP. It is used in repository-url * integration tests to expose a directory created by a regular FS repository. 
 */
 public class URLFixture extends AbstractHttpFixture implements TestRule {
-    private static final Pattern RANGE_PATTERN = Pattern.compile("bytes=(\\d+)-(\\d+)$");
     private final TemporaryFolder temporaryFolder;
     private Path repositoryDir;
@@ -60,19 +58,19 @@ private AbstractHttpFixture.Response handleGetRequest(Request request) throws IO
         if (normalizedPath.startsWith(normalizedRepositoryDir)) {
             if (Files.exists(normalizedPath) && Files.isReadable(normalizedPath) && Files.isRegularFile(normalizedPath)) {
-                final String range = request.getHeader("Range");
+                final String rangeHeader = request.getHeader("Range");
                 final Map<String, String> headers = new HashMap<>(contentType("application/octet-stream"));
-                if (range == null) {
+                if (rangeHeader == null) {
                     byte[] content = Files.readAllBytes(normalizedPath);
                     headers.put("Content-Length", String.valueOf(content.length));
                     return new Response(RestStatus.OK.getStatus(), headers, content);
                 } else {
-                    final Matcher matcher = RANGE_PATTERN.matcher(range);
-                    if (matcher.matches() == false) {
+                    final HttpHeaderParser.Range range = HttpHeaderParser.parseRangeHeader(rangeHeader);
+                    if (range == null) {
                         return new Response(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus(), TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE);
                     } else {
-                        long start = Long.parseLong(matcher.group(1));
-                        long end = Long.parseLong(matcher.group(2));
+                        long start = range.start();
+                        long end = range.end();
                         long rangeLength = end - start + 1;
                         final long fileSize = Files.size(normalizedPath);
                         if (start >= fileSize || start > end || rangeLength > fileSize) {
diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
index 12094b31a049d..17768c54b2eaf 100644
--- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java
@@ -23,9 +23,9 @@
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.core.Tuple;
 import org.elasticsearch.mocksocket.MockHttpServer;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.fixture.HttpHeaderParser;
 import org.junit.After;
 import org.junit.Before;
 
@@ -40,8 +40,6 @@
 import java.util.OptionalInt;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose;
 import static org.elasticsearch.test.NeverMatcher.never;
@@ -371,28 +369,24 @@ protected static byte[] randomBlobContent(int minSize) {
         return randomByteArrayOfLength(randomIntBetween(minSize, frequently() ?
512 : 1 << 20)); // rarely up to 1mb
     }
 
-    private static final Pattern RANGE_PATTERN = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$");
-
-    protected static Tuple<Long, Long> getRange(HttpExchange exchange) {
+    protected static HttpHeaderParser.Range getRange(HttpExchange exchange) {
         final String rangeHeader = exchange.getRequestHeaders().getFirst("Range");
         if (rangeHeader == null) {
-            return Tuple.tuple(0L, MAX_RANGE_VAL);
+            return new HttpHeaderParser.Range(0L, MAX_RANGE_VAL);
         }
 
-        final Matcher matcher = RANGE_PATTERN.matcher(rangeHeader);
-        assertTrue(rangeHeader + " matches expected pattern", matcher.matches());
-        long rangeStart = Long.parseLong(matcher.group(1));
-        long rangeEnd = Long.parseLong(matcher.group(2));
-        assertThat(rangeStart, lessThanOrEqualTo(rangeEnd));
-        return Tuple.tuple(rangeStart, rangeEnd);
+        final HttpHeaderParser.Range range = HttpHeaderParser.parseRangeHeader(rangeHeader);
+        assertNotNull(rangeHeader + " matches expected pattern", range);
+        assertThat(range.start(), lessThanOrEqualTo(range.end()));
+        return range;
     }
 
     protected static int getRangeStart(HttpExchange exchange) {
-        return Math.toIntExact(getRange(exchange).v1());
+        return Math.toIntExact(getRange(exchange).start());
     }
 
     protected static OptionalInt getRangeEnd(HttpExchange exchange) {
-        final long rangeEnd = getRange(exchange).v2();
+        final long rangeEnd = getRange(exchange).end();
         if (rangeEnd == MAX_RANGE_VAL) {
             return OptionalInt.empty();
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/fixture/HttpHeaderParser.java b/test/framework/src/main/java/org/elasticsearch/test/fixture/HttpHeaderParser.java
new file mode 100644
index 0000000000000..7018e5e259584
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/test/fixture/HttpHeaderParser.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.test.fixture;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public enum HttpHeaderParser {
+    ;
+
+    private static final Pattern RANGE_HEADER_PATTERN = Pattern.compile("bytes=([0-9]+)-([0-9]+)");
+
+    /**
+     * Parse a "Range" header
+     *
+     * Note: only a single bounded range is supported (e.g.
Range: bytes={range_start}-{range_end}) + * + * @see MDN: Range header + * @param rangeHeaderValue The header value as a string + * @return a {@link Range} instance representing the parsed value, or null if the header is malformed + */ + public static Range parseRangeHeader(String rangeHeaderValue) { + final Matcher matcher = RANGE_HEADER_PATTERN.matcher(rangeHeaderValue); + if (matcher.matches()) { + try { + return new Range(Long.parseLong(matcher.group(1)), Long.parseLong(matcher.group(2))); + } catch (NumberFormatException e) { + return null; + } + } + return null; + } + + public record Range(long start, long end) {} +} diff --git a/test/framework/src/test/java/org/elasticsearch/http/HttpHeaderParserTests.java b/test/framework/src/test/java/org/elasticsearch/http/HttpHeaderParserTests.java new file mode 100644 index 0000000000000..e025e7770ea4c --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/http/HttpHeaderParserTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.fixture.HttpHeaderParser; + +import java.math.BigInteger; + +public class HttpHeaderParserTests extends ESTestCase { + + public void testParseRangeHeader() { + final long start = randomLongBetween(0, 10_000); + final long end = randomLongBetween(start, start + 10_000); + assertEquals(new HttpHeaderParser.Range(start, end), HttpHeaderParser.parseRangeHeader("bytes=" + start + "-" + end)); + } + + public void testParseRangeHeaderInvalidLong() { + final BigInteger longOverflow = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE).add(randomBigInteger()); + assertNull(HttpHeaderParser.parseRangeHeader("bytes=123-" + longOverflow)); + assertNull(HttpHeaderParser.parseRangeHeader("bytes=" + longOverflow + "-123")); + } + + public void testParseRangeHeaderMultipleRangesNotMatched() { + assertNull( + HttpHeaderParser.parseRangeHeader( + Strings.format( + "bytes=%d-%d,%d-%d", + randomIntBetween(0, 99), + randomIntBetween(100, 199), + randomIntBetween(200, 299), + randomIntBetween(300, 399) + ) + ) + ); + } + + public void testParseRangeHeaderEndlessRangeNotMatched() { + assertNull(HttpHeaderParser.parseRangeHeader(Strings.format("bytes=%d-", randomLongBetween(0, Long.MAX_VALUE)))); + } + + public void testParseRangeHeaderSuffixLengthNotMatched() { + assertNull(HttpHeaderParser.parseRangeHeader(Strings.format("bytes=-%d", randomLongBetween(0, Long.MAX_VALUE)))); + } +} From c77f09e436563fa312db791a9ea4c8ac5d97a623 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Sat, 30 Nov 2024 09:38:40 +0100 Subject: [PATCH 330/386] [Entitlements] Refactor InstrumenterImpl tests (#117688) Following up https://github.com/elastic/elasticsearch/pull/117332#discussion_r1856803255, I refactored `InstrumenterImpl` tests, splitting them into 2 suites: - `SyntheticInstrumenterImplTests`, which tests the mechanics of instrumentation using ad-hoc test cases. 
This should see little change now that we have our Instrumenter working as intended - `InstrumenterImplTests`, which is back to its original intent to make sure (1) the right arguments make it all the way to the check methods, and (2) if the check method throws, that exception correctly bubbles up through the instrumented method. The PR also includes a little change to `InstrumenterImpl` construction to clean it up a bit and make it more testable. --- .../impl/InstrumentationServiceImpl.java | 28 +- .../impl/InstrumenterImpl.java | 61 +-- .../impl/InstrumentationServiceImplTests.java | 42 +- .../impl/InstrumenterTests.java | 378 ++++------------- .../impl/SyntheticInstrumenterTests.java | 383 ++++++++++++++++++ .../instrumentation/impl/TestException.java | 12 + .../instrumentation/impl/TestLoader.java | 20 + .../instrumentation/impl/TestMethodUtils.java | 81 ++++ .../EntitlementInitialization.java | 8 +- .../{CheckerMethod.java => CheckMethod.java} | 4 +- .../InstrumentationService.java | 10 +- 11 files changed, 646 insertions(+), 381 deletions(-) create mode 100644 libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/SyntheticInstrumenterTests.java create mode 100644 libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestException.java create mode 100644 libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestLoader.java create mode 100644 libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestMethodUtils.java rename libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/{CheckerMethod.java => CheckMethod.java} (82%) diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index 16bd04e60c5e3..9e23d2c0412c3 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,7 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; -import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; @@ -20,37 +20,23 @@ import org.objectweb.asm.Type; import java.io.IOException; -import java.lang.reflect.Method; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { @Override - public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { - return new InstrumenterImpl(classNameSuffix, instrumentationMethods); - } - - /** - * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline - */ - public MethodKey methodKeyForTarget(Method targetMethod) { - Type actualType = Type.getMethodType(Type.getMethodDescriptor(targetMethod)); - return new MethodKey( - 
Type.getInternalName(targetMethod.getDeclaringClass()), - targetMethod.getName(), - Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList() - ); + public Instrumenter newInstrumenter(Map checkMethods) { + return InstrumenterImpl.create(checkMethods); } @Override - public Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, + public Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException { - var methodsToInstrument = new HashMap(); + var methodsToInstrument = new HashMap(); var checkerClass = Class.forName(entitlementCheckerClassName); var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); ClassReader reader = new ClassReader(classFileInfo.bytecodes()); @@ -69,9 +55,9 @@ public MethodVisitor visitMethod( var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); - var checkerMethod = new CheckerMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); + var checkMethod = new CheckMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); - methodsToInstrument.put(methodToInstrument, checkerMethod); + methodsToInstrument.put(methodToInstrument, checkMethod); return mv; } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 4d762dc997383..57e30c01c5c28 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -9,7 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; -import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.objectweb.asm.AnnotationVisitor; @@ -37,9 +37,28 @@ public class InstrumenterImpl implements Instrumenter { - private static final String checkerClassDescriptor; - private static final String handleClass; - static { + private final String getCheckerClassMethodDescriptor; + private final String handleClass; + + /** + * To avoid class name collisions during testing without an agent to replace classes in-place. 
+ */ + private final String classNameSuffix; + private final Map checkMethods; + + InstrumenterImpl( + String handleClass, + String getCheckerClassMethodDescriptor, + String classNameSuffix, + Map checkMethods + ) { + this.handleClass = handleClass; + this.getCheckerClassMethodDescriptor = getCheckerClassMethodDescriptor; + this.classNameSuffix = classNameSuffix; + this.checkMethods = checkMethods; + } + + static String getCheckerClassName() { int javaVersion = Runtime.version().feature(); final String classNamePrefix; if (javaVersion >= 23) { @@ -47,20 +66,14 @@ public class InstrumenterImpl implements Instrumenter { } else { classNamePrefix = ""; } - String checkerClass = "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker"; - handleClass = checkerClass + "Handle"; - checkerClassDescriptor = Type.getObjectType(checkerClass).getDescriptor(); + return "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker"; } - /** - * To avoid class name collisions during testing without an agent to replace classes in-place. - */ - private final String classNameSuffix; - private final Map instrumentationMethods; - - public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { - this.classNameSuffix = classNameSuffix; - this.instrumentationMethods = instrumentationMethods; + public static InstrumenterImpl create(Map checkMethods) { + String checkerClass = getCheckerClassName(); + String handleClass = checkerClass + "Handle"; + String getCheckerClassMethodDescriptor = Type.getMethodDescriptor(Type.getObjectType(checkerClass)); + return new InstrumenterImpl(handleClass, getCheckerClassMethodDescriptor, "", checkMethods); } public ClassFileInfo instrumentClassFile(Class clazz) throws IOException { @@ -156,7 +169,7 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str boolean isStatic = (access & ACC_STATIC) != 0; boolean isCtor = "".equals(name); var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); - var instrumentationMethod = instrumentationMethods.get(key); + var instrumentationMethod = checkMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); @@ -190,7 +203,7 @@ class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final boolean instrumentedMethodIsCtor; private final String instrumentedMethodDescriptor; - private final CheckerMethod instrumentationMethod; + private final CheckMethod checkMethod; private boolean hasCallerSensitiveAnnotation = false; EntitlementMethodVisitor( @@ -199,13 +212,13 @@ class EntitlementMethodVisitor extends MethodVisitor { boolean instrumentedMethodIsStatic, boolean instrumentedMethodIsCtor, String instrumentedMethodDescriptor, - CheckerMethod instrumentationMethod + CheckMethod checkMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; this.instrumentedMethodIsCtor = instrumentedMethodIsCtor; this.instrumentedMethodDescriptor = instrumentedMethodDescriptor; - this.instrumentationMethod = instrumentationMethod; + this.checkMethod = checkMethod; } @Override @@ -278,11 +291,11 @@ private void forwardIncomingArguments() { private void invokeInstrumentationMethod() { mv.visitMethodInsn( INVOKEINTERFACE, - instrumentationMethod.className(), - 
instrumentationMethod.methodName(), + checkMethod.className(), + checkMethod.methodName(), Type.getMethodDescriptor( Type.VOID_TYPE, - instrumentationMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new) + checkMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new) ), true ); @@ -290,7 +303,7 @@ private void invokeInstrumentationMethod() { } protected void pushEntitlementChecker(MethodVisitor mv) { - mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", "()" + checkerClassDescriptor, false); + mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", getCheckerClassMethodDescriptor, false); } public record ClassFileInfo(String fileName, byte[] bytecodes) {} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java index 5eee0bf27d1df..9ccb72637d463 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; -import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.test.ESTestCase; @@ -52,15 +52,15 @@ interface TestCheckerCtors { } public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException { - Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); + Map checkMethods = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); - assertThat(methodsMap, aMapWithSize(3)); + assertThat(checkMethods, aMapWithSize(3)); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", "check$org_example_TestTargetClass$staticMethod", List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") @@ -69,7 +69,7 @@ public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundE ) ); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo( new MethodKey( @@ -79,7 +79,7 @@ public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundE ) ), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", "check$$instanceMethodNoArgs", List.of( @@ -91,7 +91,7 @@ public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundE ) ); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo( new MethodKey( @@ -101,7 +101,7 @@ public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundE ) ), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", "check$$instanceMethodWithArgs", List.of( @@ -117,15 
+117,15 @@ public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundE } public void testInstrumentationTargetLookupWithOverloads() throws IOException, ClassNotFoundException { - Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName()); + Map checkMethods = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName()); - assertThat(methodsMap, aMapWithSize(2)); + assertThat(checkMethods, aMapWithSize(2)); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "java/lang/String"))), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", "check$org_example_TestTargetClass$staticMethodWithOverload", List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") @@ -134,11 +134,11 @@ public void testInstrumentationTargetLookupWithOverloads() throws IOException, C ) ); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "I"))), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", "check$org_example_TestTargetClass$staticMethodWithOverload", List.of("Ljava/lang/Class;", "I", "I") @@ -149,15 +149,15 @@ public void testInstrumentationTargetLookupWithOverloads() throws IOException, C } public void testInstrumentationTargetLookupWithCtors() throws IOException, ClassNotFoundException { - Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerCtors.class.getName()); + Map checkMethods = instrumentationService.lookupMethodsToInstrument(TestCheckerCtors.class.getName()); - assertThat(methodsMap, aMapWithSize(2)); + assertThat(checkMethods, aMapWithSize(2)); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo(new MethodKey("org/example/TestTargetClass", "", List.of("I", "java/lang/String"))), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerCtors", "check$org_example_TestTargetClass$", List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") @@ -166,11 +166,11 @@ public void testInstrumentationTargetLookupWithCtors() throws IOException, Class ) ); assertThat( - methodsMap, + checkMethods, hasEntry( equalTo(new MethodKey("org/example/TestTargetClass", "", List.of())), equalTo( - new CheckerMethod( + new CheckMethod( "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerCtors", "check$org_example_TestTargetClass$", List.of("Ljava/lang/Class;") diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index 40f0162d2eaa2..c8e1b26d1fc52 100644 --- a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -9,10 +9,8 @@ package org.elasticsearch.entitlement.instrumentation.impl; -import org.elasticsearch.common.Strings; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import 
org.elasticsearch.entitlement.instrumentation.CheckerMethod; -import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -23,16 +21,21 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; import java.net.URLStreamHandlerFactory; -import java.util.Arrays; import java.util.List; import java.util.Map; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; -import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.callStaticMethod; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.getCheckMethod; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.methodKeyForConstructor; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.methodKeyForTarget; +import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.startsWith; import static org.objectweb.asm.Opcodes.INVOKESTATIC; /** @@ -42,7 +45,6 @@ */ @ESTestCase.WithoutSecurityManager public class InstrumenterTests extends ESTestCase { - final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); static volatile TestEntitlementChecker testChecker; @@ -59,12 +61,7 @@ public void initialize() { * Contains all the virtual methods from {@link ClassToInstrument}, * allowing this test to call them on the dynamically loaded instrumented class. */ - public interface Testable { - // This method is here to demonstrate Instrumenter does not get confused by overloads - void someMethod(int arg); - - void someMethod(int arg, String anotherArg); - } + public interface Testable {} /** * This is a placeholder for real class library methods. @@ -78,41 +75,24 @@ public static class ClassToInstrument implements Testable { public ClassToInstrument() {} - public ClassToInstrument(int arg) {} + // URLClassLoader ctor + public ClassToInstrument(URL[] urls) {} public static void systemExit(int status) { assertEquals(123, status); } - - public static void anotherSystemExit(int status) { - assertEquals(123, status); - } - - public void someMethod(int arg) {} - - public void someMethod(int arg, String anotherArg) {} - - public static void someStaticMethod(int arg) {} - - public static void someStaticMethod(int arg, String anotherArg) {} } - static final class TestException extends RuntimeException {} + private static final String SAMPLE_NAME = "TEST"; - /** - * Interface to test specific, "synthetic" cases (e.g. overloaded methods, overloaded constructors, etc.) 
that - * may be not present/may be difficult to find or not clear in the production EntitlementChecker interface - */ - public interface MockEntitlementChecker extends EntitlementChecker { - void checkSomeStaticMethod(Class clazz, int arg); - - void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); - - void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + private static final URL SAMPLE_URL = createSampleUrl(); - void checkCtor(Class clazz); - - void checkCtor(Class clazz, int arg); + private static URL createSampleUrl() { + try { + return URI.create("file:/test/example").toURL(); + } catch (MalformedURLException e) { + return null; + } } /** @@ -122,7 +102,7 @@ public interface MockEntitlementChecker extends EntitlementChecker { * just to demonstrate that the injected bytecodes succeed in calling these methods. * It also asserts that the arguments are correct. */ - public static class TestEntitlementChecker implements MockEntitlementChecker { + public static class TestEntitlementChecker implements EntitlementChecker { /** * This allows us to test that the instrumentation is correct in both cases: * if the check throws, and if it doesn't. @@ -130,104 +110,84 @@ public static class TestEntitlementChecker implements MockEntitlementChecker { volatile boolean isActive; int checkSystemExitCallCount = 0; - int checkSomeStaticMethodIntCallCount = 0; - int checkSomeStaticMethodIntStringCallCount = 0; - int checkSomeInstanceMethodCallCount = 0; - - int checkCtorCallCount = 0; - int checkCtorIntCallCount = 0; + int checkURLClassLoaderCallCount = 0; @Override public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; - assertSame(InstrumenterTests.class, callerClass); + assertSame(TestMethodUtils.class, callerClass); assertEquals(123, status); throwIfActive(); } @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) {} - - @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) {} - - @Override - public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) {} - - @Override - public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) {} - - @Override - public void check$java_net_URLClassLoader$( - Class callerClass, - String name, - URL[] urls, - ClassLoader parent, - URLStreamHandlerFactory factory - ) {} - - private void throwIfActive() { - if (isActive) { - throw new TestException(); - } - } - - @Override - public void checkSomeStaticMethod(Class callerClass, int arg) { - checkSomeStaticMethodIntCallCount++; + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls) { + checkURLClassLoaderCallCount++; assertSame(InstrumenterTests.class, callerClass); - assertEquals(123, arg); + assertThat(urls, arrayContaining(SAMPLE_URL)); throwIfActive(); } @Override - public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { - checkSomeStaticMethodIntStringCallCount++; + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent) { + checkURLClassLoaderCallCount++; assertSame(InstrumenterTests.class, callerClass); - assertEquals(123, arg); - assertEquals("abc", anotherArg); + assertThat(urls, arrayContaining(SAMPLE_URL)); + assertThat(parent, equalTo(ClassLoader.getSystemClassLoader())); throwIfActive(); } @Override - public void 
checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { - checkSomeInstanceMethodCallCount++; + public void check$java_net_URLClassLoader$(Class callerClass, URL[] urls, ClassLoader parent, URLStreamHandlerFactory factory) { + checkURLClassLoaderCallCount++; assertSame(InstrumenterTests.class, callerClass); - assertThat( - that.getClass().getName(), - startsWith("org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$ClassToInstrument") - ); - assertEquals(123, arg); - assertEquals("def", anotherArg); + assertThat(urls, arrayContaining(SAMPLE_URL)); + assertThat(parent, equalTo(ClassLoader.getSystemClassLoader())); throwIfActive(); } @Override - public void checkCtor(Class callerClass) { - checkCtorCallCount++; + public void check$java_net_URLClassLoader$(Class callerClass, String name, URL[] urls, ClassLoader parent) { + checkURLClassLoaderCallCount++; assertSame(InstrumenterTests.class, callerClass); + assertThat(name, equalTo(SAMPLE_NAME)); + assertThat(urls, arrayContaining(SAMPLE_URL)); + assertThat(parent, equalTo(ClassLoader.getSystemClassLoader())); throwIfActive(); } @Override - public void checkCtor(Class callerClass, int arg) { - checkCtorIntCallCount++; + public void check$java_net_URLClassLoader$( + Class callerClass, + String name, + URL[] urls, + ClassLoader parent, + URLStreamHandlerFactory factory + ) { + checkURLClassLoaderCallCount++; assertSame(InstrumenterTests.class, callerClass); - assertEquals(123, arg); + assertThat(name, equalTo(SAMPLE_NAME)); + assertThat(urls, arrayContaining(SAMPLE_URL)); + assertThat(parent, equalTo(ClassLoader.getSystemClassLoader())); throwIfActive(); } + + private void throwIfActive() { + if (isActive) { + throw new TestException(); + } + } } - public void testClassIsInstrumented() throws Exception { + public void testSystemExitIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); - Map methods = Map.of( - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), - checkerMethod + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + getCheckMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class) ); - var instrumenter = createInstrumenter(methods); + var instrumenter = createInstrumenter(checkMethods); byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -251,86 +211,15 @@ public void testClassIsInstrumented() throws Exception { assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); } - public void testClassIsNotInstrumentedTwice() throws Exception { - var classToInstrument = ClassToInstrument.class; - - CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); - Map methods = Map.of( - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), - checkerMethod - ); - - var instrumenter = createInstrumenter(methods); - - InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); - var internalClassName = Type.getInternalName(classToInstrument); - - byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); - byte[] instrumentedTwiceBytecode = 
instrumenter.instrumentClass(internalClassName, instrumentedBytecode); - - logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", bytecode2text(instrumentedBytecode))); - logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); - - Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - classToInstrument.getName() + "_NEW_NEW", - instrumentedTwiceBytecode - ); - - getTestEntitlementChecker().isActive = true; - getTestEntitlementChecker().checkSystemExitCallCount = 0; - - assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertEquals(1, getTestEntitlementChecker().checkSystemExitCallCount); - } - - public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { + public void testURLClassLoaderIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); - Map methods = Map.of( - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), - checkerMethod, - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("anotherSystemExit", int.class)), - checkerMethod + Map checkMethods = Map.of( + methodKeyForConstructor(classToInstrument, List.of(Type.getInternalName(URL[].class))), + getCheckMethod(EntitlementChecker.class, "check$java_net_URLClassLoader$", Class.class, URL[].class) ); - var instrumenter = createInstrumenter(methods); - - InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); - var internalClassName = Type.getInternalName(classToInstrument); - - byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); - byte[] instrumentedTwiceBytecode = instrumenter.instrumentClass(internalClassName, instrumentedBytecode); - - logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", bytecode2text(instrumentedBytecode))); - logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); - - Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - classToInstrument.getName() + "_NEW_NEW", - instrumentedTwiceBytecode - ); - - getTestEntitlementChecker().isActive = true; - getTestEntitlementChecker().checkSystemExitCallCount = 0; - - assertThrows(TestException.class, () -> callStaticMethod(newClass, "systemExit", 123)); - assertEquals(1, getTestEntitlementChecker().checkSystemExitCallCount); - - assertThrows(TestException.class, () -> callStaticMethod(newClass, "anotherSystemExit", 123)); - assertEquals(2, getTestEntitlementChecker().checkSystemExitCallCount); - } - - public void testInstrumenterWorksWithOverloads() throws Exception { - var classToInstrument = ClassToInstrument.class; - - Map methods = Map.of( - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), - getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), - getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) - ); - - var instrumenter = createInstrumenter(methods); + var instrumenter = createInstrumenter(checkMethods); byte[] 
newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -343,80 +232,19 @@ public void testInstrumenterWorksWithOverloads() throws Exception { newBytecode ); - getTestEntitlementChecker().isActive = true; - - // After checking is activated, everything should throw - assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); - assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); - - assertEquals(1, getTestEntitlementChecker().checkSomeStaticMethodIntCallCount); - assertEquals(1, getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount); - } - - public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { - var classToInstrument = ClassToInstrument.class; - - Map methods = Map.of( - instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), - getCheckerMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) - ); - - var instrumenter = createInstrumenter(methods); - - byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); - - if (logger.isTraceEnabled()) { - logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); - } + getTestEntitlementChecker().isActive = false; - Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - classToInstrument.getName() + "_NEW", - newBytecode - ); + // Before checking is active, nothing should throw + newClass.getConstructor(URL[].class).newInstance((Object) new URL[] { SAMPLE_URL }); getTestEntitlementChecker().isActive = true; - Testable testTargetClass = (Testable) (newClass.getConstructor().newInstance()); - - // This overload is not instrumented, so it will not throw - testTargetClass.someMethod(123); - assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); - - assertEquals(1, getTestEntitlementChecker().checkSomeInstanceMethodCallCount); - } - - public void testInstrumenterWorksWithConstructors() throws Exception { - var classToInstrument = ClassToInstrument.class; - - Map methods = Map.of( - new MethodKey(classToInstrument.getName().replace('.', '/'), "", List.of()), - getCheckerMethod(MockEntitlementChecker.class, "checkCtor", Class.class), - new MethodKey(classToInstrument.getName().replace('.', '/'), "", List.of("I")), - getCheckerMethod(MockEntitlementChecker.class, "checkCtor", Class.class, int.class) - ); - - var instrumenter = createInstrumenter(methods); - - byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); - - if (logger.isTraceEnabled()) { - logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); - } - - Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - classToInstrument.getName() + "_NEW", - newBytecode + // After checking is activated, everything should throw + var exception = assertThrows( + InvocationTargetException.class, + () -> newClass.getConstructor(URL[].class).newInstance((Object) new URL[] { SAMPLE_URL }) ); - - getTestEntitlementChecker().isActive = true; - - var ex = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor().newInstance()); - assertThat(ex.getCause(), instanceOf(TestException.class)); - var ex2 = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor(int.class).newInstance(123)); - assertThat(ex2.getCause(), 
instanceOf(TestException.class)); - - assertEquals(1, getTestEntitlementChecker().checkCtorCallCount); - assertEquals(1, getTestEntitlementChecker().checkCtorIntCallCount); + assertThat(exception.getCause(), instanceOf(TestException.class)); } /** This test doesn't replace classToInstrument in-place but instead loads a separate @@ -425,9 +253,10 @@ public void testInstrumenterWorksWithConstructors() throws Exception { * MethodKey and instrumentationMethod with slightly different signatures (using the common interface * Testable) which is not what would happen when it's run by the agent. */ - private InstrumenterImpl createInstrumenter(Map methods) throws NoSuchMethodException { + private InstrumenterImpl createInstrumenter(Map checkMethods) throws NoSuchMethodException { Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); - return new InstrumenterImpl("_NEW", methods) { + + return new InstrumenterImpl(null, null, "_NEW", checkMethods) { /** * We're not testing the bridge library here. * Just call our own getter instead. @@ -445,58 +274,5 @@ protected void pushEntitlementChecker(MethodVisitor mv) { }; } - private static CheckerMethod getCheckerMethod(Class clazz, String methodName, Class... parameterTypes) - throws NoSuchMethodException { - var method = clazz.getMethod(methodName, parameterTypes); - return new CheckerMethod( - Type.getInternalName(clazz), - method.getName(), - Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() - ); - } - - /** - * Calling a static method of a dynamically loaded class is significantly more cumbersome - * than calling a virtual method. - */ - private static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { - try { - c.getMethod(methodName, int.class).invoke(null, arg); - } catch (InvocationTargetException e) { - Throwable cause = e.getCause(); - if (cause instanceof TestException n) { - // Sometimes we're expecting this one! - throw n; - } else { - throw new AssertionError(cause); - } - } - } - - private static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, - IllegalAccessException { - try { - c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); - } catch (InvocationTargetException e) { - Throwable cause = e.getCause(); - if (cause instanceof TestException n) { - // Sometimes we're expecting this one! - throw n; - } else { - throw new AssertionError(cause); - } - } - } - - static class TestLoader extends ClassLoader { - TestLoader(ClassLoader parent) { - super(parent); - } - - public Class defineClassFromBytes(String name, byte[] bytes) { - return defineClass(name, bytes, 0, bytes.length); - } - } - private static final Logger logger = LogManager.getLogger(InstrumenterTests.class); } diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/SyntheticInstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/SyntheticInstrumenterTests.java new file mode 100644 index 0000000000000..8e0409971ba61 --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/SyntheticInstrumenterTests.java @@ -0,0 +1,383 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.ESTestCase; +import org.objectweb.asm.Type; + +import java.lang.reflect.InvocationTargetException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; +import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.callStaticMethod; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.getCheckMethod; +import static org.elasticsearch.entitlement.instrumentation.impl.TestMethodUtils.methodKeyForTarget; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; + +/** + * This tests {@link InstrumenterImpl} with some ad-hoc instrumented method and checker methods, to allow us to check + * some ad-hoc test cases (e.g. overloaded methods, overloaded targets, multiple instrumentation, etc.) + */ +@ESTestCase.WithoutSecurityManager +public class SyntheticInstrumenterTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(SyntheticInstrumenterTests.class); + + /** + * Contains all the virtual methods from {@link TestClassToInstrument}, + * allowing this test to call them on the dynamically loaded instrumented class. + */ + public interface Testable { + // This method is here to demonstrate Instrumenter does not get confused by overloads + void someMethod(int arg); + + void someMethod(int arg, String anotherArg); + } + + /** + * This is a placeholder for real class library methods. + * Without the java agent, we can't instrument the real methods, so we instrument this instead. + *
<p>
    + * Methods of this class must have the same signature and the same static/virtual condition as the corresponding real method. + * They should assert that the arguments came through correctly. + * They must not throw {@link TestException}. + */ + public static class TestClassToInstrument implements Testable { + + public TestClassToInstrument() {} + + public TestClassToInstrument(int arg) {} + + public void someMethod(int arg) {} + + public void someMethod(int arg, String anotherArg) {} + + public static void someStaticMethod(int arg) {} + + public static void someStaticMethod(int arg, String anotherArg) {} + + public static void anotherStaticMethod(int arg) {} + } + + /** + * Interface to test specific, "synthetic" cases (e.g. overloaded methods, overloaded constructors, etc.) that + * may be not present/may be difficult to find or not clear in the production EntitlementChecker interface + */ + public interface MockEntitlementChecker { + void checkSomeStaticMethod(Class clazz, int arg); + + void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); + + void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + + void checkCtor(Class clazz); + + void checkCtor(Class clazz, int arg); + } + + public static class TestEntitlementCheckerHolder { + static TestEntitlementChecker checkerInstance = new TestEntitlementChecker(); + + public static MockEntitlementChecker instance() { + return checkerInstance; + } + } + + public static class TestEntitlementChecker implements MockEntitlementChecker { + /** + * This allows us to test that the instrumentation is correct in both cases: + * if the check throws, and if it doesn't. + */ + volatile boolean isActive; + + int checkSomeStaticMethodIntCallCount = 0; + int checkSomeStaticMethodIntStringCallCount = 0; + int checkSomeInstanceMethodCallCount = 0; + + int checkCtorCallCount = 0; + int checkCtorIntCallCount = 0; + + private void throwIfActive() { + if (isActive) { + throw new TestException(); + } + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg) { + checkSomeStaticMethodIntCallCount++; + assertSame(TestMethodUtils.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { + checkSomeStaticMethodIntStringCallCount++; + assertSame(TestMethodUtils.class, callerClass); + assertEquals(123, arg); + assertEquals("abc", anotherArg); + throwIfActive(); + } + + @Override + public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { + checkSomeInstanceMethodCallCount++; + assertSame(SyntheticInstrumenterTests.class, callerClass); + assertThat( + that.getClass().getName(), + startsWith("org.elasticsearch.entitlement.instrumentation.impl.SyntheticInstrumenterTests$TestClassToInstrument") + ); + assertEquals(123, arg); + assertEquals("def", anotherArg); + throwIfActive(); + } + + @Override + public void checkCtor(Class callerClass) { + checkCtorCallCount++; + assertSame(SyntheticInstrumenterTests.class, callerClass); + throwIfActive(); + } + + @Override + public void checkCtor(Class callerClass, int arg) { + checkCtorIntCallCount++; + assertSame(SyntheticInstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + } + + public void testClassIsInstrumented() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + CheckMethod checkMethod = getCheckMethod(MockEntitlementChecker.class, 
"checkSomeStaticMethod", Class.class, int.class); + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + checkMethod + ); + + var instrumenter = createInstrumenter(checkMethods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = false; + + // Before checking is active, nothing should throw + callStaticMethod(newClass, "someStaticMethod", 123); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + } + + public void testClassIsNotInstrumentedTwice() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + CheckMethod checkMethod = getCheckMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class); + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + checkMethod + ); + + var instrumenter = createInstrumenter(checkMethods); + + InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); + var internalClassName = Type.getInternalName(classToInstrument); + + byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); + byte[] instrumentedTwiceBytecode = instrumenter.instrumentClass(internalClassName, instrumentedBytecode); + + logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", bytecode2text(instrumentedBytecode))); + logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW_NEW", + instrumentedTwiceBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount = 0; + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount); + } + + public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + CheckMethod checkMethod = getCheckMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class); + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + checkMethod, + methodKeyForTarget(classToInstrument.getMethod("anotherStaticMethod", int.class)), + checkMethod + ); + + var instrumenter = createInstrumenter(checkMethods); + + InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); + var internalClassName = Type.getInternalName(classToInstrument); + + byte[] instrumentedBytecode = instrumenter.instrumentClass(internalClassName, initial.bytecodes()); + byte[] instrumentedTwiceBytecode = instrumenter.instrumentClass(internalClassName, instrumentedBytecode); + + logger.trace(() -> Strings.format("Bytecode after 1st instrumentation:\n%s", 
bytecode2text(instrumentedBytecode))); + logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW_NEW", + instrumentedTwiceBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount = 0; + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount); + + assertThrows(TestException.class, () -> callStaticMethod(newClass, "anotherStaticMethod", 123)); + assertEquals(2, TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount); + } + + public void testInstrumenterWorksWithOverloads() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + getCheckMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), + methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), + getCheckMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(checkMethods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount = 0; + TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntStringCallCount = 0; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); + + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntCallCount); + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkSomeStaticMethodIntStringCallCount); + } + + public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + Map checkMethods = Map.of( + methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), + getCheckMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(checkMethods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + TestEntitlementCheckerHolder.checkerInstance.checkSomeInstanceMethodCallCount = 0; + + Testable 
testTargetClass = (Testable) (newClass.getConstructor().newInstance()); + + // This overload is not instrumented, so it will not throw + testTargetClass.someMethod(123); + assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); + + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkSomeInstanceMethodCallCount); + } + + public void testInstrumenterWorksWithConstructors() throws Exception { + var classToInstrument = TestClassToInstrument.class; + + Map checkMethods = Map.of( + new MethodKey(classToInstrument.getName().replace('.', '/'), "", List.of()), + getCheckMethod(MockEntitlementChecker.class, "checkCtor", Class.class), + new MethodKey(classToInstrument.getName().replace('.', '/'), "", List.of("I")), + getCheckMethod(MockEntitlementChecker.class, "checkCtor", Class.class, int.class) + ); + + var instrumenter = createInstrumenter(checkMethods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + TestEntitlementCheckerHolder.checkerInstance.isActive = true; + + var ex = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor().newInstance()); + assertThat(ex.getCause(), instanceOf(TestException.class)); + var ex2 = assertThrows(InvocationTargetException.class, () -> newClass.getConstructor(int.class).newInstance(123)); + assertThat(ex2.getCause(), instanceOf(TestException.class)); + + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkCtorCallCount); + assertEquals(1, TestEntitlementCheckerHolder.checkerInstance.checkCtorIntCallCount); + } + + /** This test doesn't replace classToInstrument in-place but instead loads a separate + * class with the same class name plus a "_NEW" suffix (classToInstrument.class.getName() + "_NEW") + * that contains the instrumentation. Because of this, we need to configure the Transformer to use a + * MethodKey and instrumentationMethod with slightly different signatures (using the common interface + * Testable) which is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Map checkMethods) { + String checkerClass = Type.getInternalName(SyntheticInstrumenterTests.MockEntitlementChecker.class); + String handleClass = Type.getInternalName(SyntheticInstrumenterTests.TestEntitlementCheckerHolder.class); + String getCheckerClassMethodDescriptor = Type.getMethodDescriptor(Type.getObjectType(checkerClass)); + + return new InstrumenterImpl(handleClass, getCheckerClassMethodDescriptor, "_NEW", checkMethods); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestException.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestException.java new file mode 100644 index 0000000000000..5e308e5bd4a98 --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestException.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +final class TestException extends RuntimeException {} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestLoader.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestLoader.java new file mode 100644 index 0000000000000..9eb8e9328ecba --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestLoader.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +class TestLoader extends ClassLoader { + TestLoader(ClassLoader parent) { + super(parent); + } + + public Class defineClassFromBytes(String name, byte[] bytes) { + return defineClass(name, bytes, 0, bytes.length); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestMethodUtils.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestMethodUtils.java new file mode 100644 index 0000000000000..de7822fea926e --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/TestMethodUtils.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.entitlement.instrumentation.CheckMethod; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.objectweb.asm.Type; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +class TestMethodUtils { + + /** + * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline + */ + static MethodKey methodKeyForTarget(Method targetMethod) { + Type actualType = Type.getMethodType(Type.getMethodDescriptor(targetMethod)); + return new MethodKey( + Type.getInternalName(targetMethod.getDeclaringClass()), + targetMethod.getName(), + Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList() + ); + } + + static MethodKey methodKeyForConstructor(Class classToInstrument, List params) { + return new MethodKey(classToInstrument.getName().replace('.', '/'), "", params); + } + + static CheckMethod getCheckMethod(Class clazz, String methodName, Class... parameterTypes) throws NoSuchMethodException { + var method = clazz.getMethod(methodName, parameterTypes); + return new CheckMethod( + Type.getInternalName(clazz), + method.getName(), + Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() + ); + } + + /** + * Calling a static method of a dynamically loaded class is significantly more cumbersome + * than calling a virtual method. + */ + static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod(methodName, int.class).invoke(null, arg); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! + throw n; + } else { + throw new AssertionError(cause); + } + } + } + + static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, + IllegalAccessException { + try { + c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! 
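+                // (TestException is thrown by the injected check method whenever the checker's isActive flag is set; see throwIfActive)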
+ throw n; + } else { + throw new AssertionError(cause); + } + } + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 1f87e067e04f1..0ffab5f93969f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -13,7 +13,7 @@ import org.elasticsearch.core.internal.provider.ProviderLocator; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; @@ -63,13 +63,13 @@ public static EntitlementChecker checker() { public static void initialize(Instrumentation inst) throws Exception { manager = initChecker(); - Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( + Map checkMethods = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( "org.elasticsearch.entitlement.bridge.EntitlementChecker" ); - var classesToTransform = methodMap.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); + var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); - inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), classesToTransform), true); + inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter(checkMethods), classesToTransform), true); // TODO: should we limit this array somehow? var classesToRetransform = classesToTransform.stream().map(EntitlementInitialization::internalNameToClass).toArray(Class[]::new); inst.retransformClasses(classesToRetransform); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckMethod.java similarity index 82% rename from libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckMethod.java index c20a75a61a608..384d455c7a34b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckMethod.java @@ -12,7 +12,7 @@ import java.util.List; /** - * A structure to use as a representation of the checker method the instrumentation will inject. + * A structure to use as a representation of the checkXxx method the instrumentation will inject. * * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes * @param methodName the checker method name @@ -20,4 +20,4 @@ * type descriptors) * for methodName parameters. 
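 * (for example, the internal name of {@code org.elasticsearch.entitlement.bridge.EntitlementChecker} is
 * {@code org/elasticsearch/entitlement/bridge/EntitlementChecker})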
*/ -public record CheckerMethod(String className, String methodName, List parameterDescriptors) {} +public record CheckMethod(String className, String methodName, List parameterDescriptors) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 12316bfb043c5..d0331d756d2b2 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -10,19 +10,13 @@ package org.elasticsearch.entitlement.instrumentation; import java.io.IOException; -import java.lang.reflect.Method; import java.util.Map; /** * The SPI service entry point for instrumentation. */ public interface InstrumentationService { - Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); + Instrumenter newInstrumenter(Map checkMethods); - /** - * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline - */ - MethodKey methodKeyForTarget(Method targetMethod); - - Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException; + Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException; } From deb838c027ecd83bc34fd487566571c61bfcd8be Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sun, 1 Dec 2024 01:25:39 +1100 Subject: [PATCH 331/386] Mute org.elasticsearch.xpack.esql.action.CrossClustersCancellationIT testCancel #117568 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b82e95ea26890..d5e2dbd84cb4a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -234,6 +234,9 @@ tests: - class: org.elasticsearch.search.ccs.CrossClusterIT method: testCancel issue: https://github.com/elastic/elasticsearch/issues/108061 +- class: org.elasticsearch.xpack.esql.action.CrossClustersCancellationIT + method: testCancel + issue: https://github.com/elastic/elasticsearch/issues/117568 # Examples: # From 31cb0f658a8b3239bb38dd190e1efeb79062b2f9 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Sat, 30 Nov 2024 23:32:18 +0100 Subject: [PATCH 332/386] [Build] Replace usage of deprecated develocity system prop (#117793) see https://buildkite.com/elastic/elasticsearch-intake/builds/13680#019374ed-096e-4965-8651-1b3fd26dd9c2/79-392 --- .buildkite/pipelines/intake.template.yml | 16 ++++++++-------- .buildkite/pipelines/intake.yml | 16 ++++++++-------- .../pipelines/lucene-snapshot/run-tests.yml | 16 ++++++++-------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 57412bbe908bc..9d7cf3c7e0083 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -1,6 +1,6 @@ steps: - label: sanity-check - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files precommit + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints precommit timeout_in_minutes: 300 agents: provider: gcp @@ 
-9,7 +9,7 @@ steps: buildDirectory: /dev/shm/bk - wait - label: part1 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart1 timeout_in_minutes: 300 agents: provider: gcp @@ -17,7 +17,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part2 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart2 timeout_in_minutes: 300 agents: provider: gcp @@ -25,7 +25,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part3 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart3 timeout_in_minutes: 300 agents: provider: gcp @@ -33,7 +33,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part4 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart4 timeout_in_minutes: 300 agents: provider: gcp @@ -41,7 +41,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part5 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5 timeout_in_minutes: 300 agents: provider: gcp @@ -51,7 +51,7 @@ steps: - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$$BWC_VERSION#bwcTest + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints v$$BWC_VERSION#bwcTest timeout_in_minutes: 300 matrix: setup: @@ -64,7 +64,7 @@ steps: env: BWC_VERSION: "{{matrix.BWC_VERSION}}" - label: rest-compat - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkRestCompat + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 agents: provider: gcp diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 5be5990cfb203..6c8b8edfcbac1 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml 
@@ -1,7 +1,7 @@ # This file is auto-generated. See .buildkite/pipelines/intake.template.yml steps: - label: sanity-check - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files precommit + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints precommit timeout_in_minutes: 300 agents: provider: gcp @@ -10,7 +10,7 @@ steps: buildDirectory: /dev/shm/bk - wait - label: part1 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart1 timeout_in_minutes: 300 agents: provider: gcp @@ -18,7 +18,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part2 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart2 timeout_in_minutes: 300 agents: provider: gcp @@ -26,7 +26,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part3 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart3 timeout_in_minutes: 300 agents: provider: gcp @@ -34,7 +34,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part4 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart4 timeout_in_minutes: 300 agents: provider: gcp @@ -42,7 +42,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk - label: part5 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5 timeout_in_minutes: 300 agents: provider: gcp @@ -52,7 +52,7 @@ steps: - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$$BWC_VERSION#bwcTest + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints v$$BWC_VERSION#bwcTest timeout_in_minutes: 300 matrix: setup: @@ -65,7 +65,7 @@ steps: env: BWC_VERSION: "{{matrix.BWC_VERSION}}" - label: rest-compat - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true 
-Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkRestCompat + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 agents: provider: gcp diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index f7293e051467c..ddc63419a2e2f 100644 --- a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -1,6 +1,6 @@ steps: - label: sanity-check - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files precommit + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints precommit timeout_in_minutes: 300 agents: provider: gcp @@ -9,7 +9,7 @@ steps: buildDirectory: /dev/shm/bk - wait: null - label: part1 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart1 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart1 timeout_in_minutes: 300 agents: provider: gcp @@ -17,7 +17,7 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk - label: part2 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart2 timeout_in_minutes: 300 agents: provider: gcp @@ -25,7 +25,7 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk - label: part3 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart3 timeout_in_minutes: 300 agents: provider: gcp @@ -33,7 +33,7 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk - label: part4 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart4 timeout_in_minutes: 300 agents: provider: gcp @@ -41,7 +41,7 @@ steps: machineType: custom-32-98304 buildDirectory: /dev/shm/bk - label: part5 - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart5 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkPart5 timeout_in_minutes: 300 agents: provider: gcp @@ -51,7 +51,7 @@ steps: - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" - command: .ci/scripts/run-gradle.sh 
-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$$BWC_VERSION#bwcTest + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints v$$BWC_VERSION#bwcTest timeout_in_minutes: 300 matrix: setup: @@ -66,7 +66,7 @@ steps: env: BWC_VERSION: "{{matrix.BWC_VERSION}}" - label: rest-compat - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkRestCompat + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-file-fingerprints checkRestCompat timeout_in_minutes: 300 agents: provider: gcp From bda415b7fdf4a73091a198339e5f1660c1378029 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 30 Nov 2024 20:09:08 -0800 Subject: [PATCH 333/386] Fix CCS cancellation test (#117790) We should have checked that all drivers were canceled, not cancellable (which is always true), before unblocking the compute tasks. Closes #117568 --- muted-tests.yml | 3 -- .../action/CrossClustersCancellationIT.java | 29 ++++++++++--------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d5e2dbd84cb4a..b82e95ea26890 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -234,9 +234,6 @@ tests: - class: org.elasticsearch.search.ccs.CrossClusterIT method: testCancel issue: https://github.com/elastic/elasticsearch/issues/108061 -- class: org.elasticsearch.xpack.esql.action.CrossClustersCancellationIT - method: testCancel - issue: https://github.com/elastic/elasticsearch/issues/117568 # Examples: # diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index c426e0f528eab..5ffc92636b272 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -179,19 +179,22 @@ public void testCancel() throws Exception { }); var cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTasks.get(0).taskId()).setReason("proxy timeout"); client().execute(TransportCancelTasksAction.TYPE, cancelRequest); - assertBusy(() -> { - List drivers = client(REMOTE_CLUSTER).admin() - .cluster() - .prepareListTasks() - .setActions(DriverTaskRunner.ACTION_NAME) - .get() - .getTasks(); - assertThat(drivers.size(), greaterThanOrEqualTo(1)); - for (TaskInfo driver : drivers) { - assertTrue(driver.cancellable()); - } - }); - PauseFieldPlugin.allowEmitting.countDown(); + try { + assertBusy(() -> { + List drivers = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(DriverTaskRunner.ACTION_NAME) + .get() + .getTasks(); + assertThat(drivers.size(), greaterThanOrEqualTo(1)); + for (TaskInfo driver : drivers) { + assertTrue(driver.cancelled()); + } + }); + } finally { + PauseFieldPlugin.allowEmitting.countDown(); + } Exception error = expectThrows(Exception.class, requestFuture::actionGet); assertThat(error.getMessage(), containsString("proxy timeout")); } From 5025f6cd3d9ba7b008ff9bdca91c1a466b36a2e6 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: 
Sun, 1 Dec 2024 10:10:56 +0100 Subject: [PATCH 334/386] Lazy compute description in ReplicationRequest.createTask (#117783) These can at times be quite long strings, no need to materialize unless requested. This is showing up as allocating needless heap of O(GB) in some benchmarks during indexing needlessly. --- .../action/support/replication/ReplicationRequest.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 530f22f4bed53..debc64914a171 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -210,7 +210,12 @@ public void writeThin(StreamOutput out) throws IOException { @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new ReplicationTask(id, type, action, getDescription(), parentTaskId, headers); + return new ReplicationTask(id, type, action, "", parentTaskId, headers) { + @Override + public String getDescription() { + return ReplicationRequest.this.getDescription(); + } + }; } @Override From 3e7159d9e97e2d1645e5d5bc56fb98c653186b9f Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Sun, 1 Dec 2024 21:33:27 +0100 Subject: [PATCH 335/386] [Build] Fix cacheability of discovery-azure-classic (#117806) Also update cache validation scripts --- .buildkite/scripts/gradle-build-cache-validation.sh | 7 +++---- plugins/discovery-azure-classic/build.gradle | 3 ++- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.buildkite/scripts/gradle-build-cache-validation.sh b/.buildkite/scripts/gradle-build-cache-validation.sh index 75dc9b264b8bc..3c5021e436e4a 100755 --- a/.buildkite/scripts/gradle-build-cache-validation.sh +++ b/.buildkite/scripts/gradle-build-cache-validation.sh @@ -2,18 +2,17 @@ set -euo pipefail -VALIDATION_SCRIPTS_VERSION=2.5.1 +VALIDATION_SCRIPTS_VERSION=2.7.1 GRADLE_ENTERPRISE_ACCESS_KEY=$(vault kv get -field=value secret/ci/elastic-elasticsearch/gradle-enterprise-api-key) export GRADLE_ENTERPRISE_ACCESS_KEY - -curl -s -L -O https://github.com/gradle/gradle-enterprise-build-validation-scripts/releases/download/v$VALIDATION_SCRIPTS_VERSION/gradle-enterprise-gradle-build-validation-$VALIDATION_SCRIPTS_VERSION.zip && unzip -q -o gradle-enterprise-gradle-build-validation-$VALIDATION_SCRIPTS_VERSION.zip +curl -s -L -O https://github.com/gradle/gradle-enterprise-build-validation-scripts/releases/download/v$VALIDATION_SCRIPTS_VERSION/develocity-gradle-build-validation-$VALIDATION_SCRIPTS_VERSION.zip && unzip -q -o develocity-gradle-build-validation-$VALIDATION_SCRIPTS_VERSION.zip # Create a temporary file tmpOutputFile=$(mktemp) trap "rm $tmpOutputFile" EXIT set +e -gradle-enterprise-gradle-build-validation/03-validate-local-build-caching-different-locations.sh -r https://github.com/elastic/elasticsearch.git -b $BUILDKITE_BRANCH --gradle-enterprise-server https://gradle-enterprise.elastic.co -t precommit --fail-if-not-fully-cacheable | tee $tmpOutputFile +develocity-gradle-build-validation/03-validate-local-build-caching-different-locations.sh -r https://github.com/elastic/elasticsearch.git -b $BUILDKITE_BRANCH --develocity-server https://gradle-enterprise.elastic.co -t precommit --fail-if-not-fully-cacheable | tee $tmpOutputFile # Capture the return value 
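# (with `set -euo pipefail` in effect above, a non-zero exit status from the validation script survives the pipe through tee)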
retval=$? set -e diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 3ec2ec531ae92..9549236775bfe 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -65,9 +65,10 @@ TaskProvider createKey = tasks.register("createKey", LoggedExec) { outputs.file(keystore).withPropertyName('keystoreFile') executable = "${buildParams.runtimeJavaHome.get()}/bin/keytool" getStandardInput().set('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n') + String keystorePath = projectDir.toPath().relativize(keystore.toPath()).toString() args '-genkey', '-alias', 'test-node', - '-keystore', keystore, + '-keystore', keystorePath, '-keyalg', 'RSA', '-keysize', '2048', '-validity', '712', From 3cfb649661438f002816b1c9bbd17d78c14827a6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 2 Dec 2024 17:29:57 +1100 Subject: [PATCH 336/386] Mute org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} #117815 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index b82e95ea26890..8d64e1557ca19 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -234,6 +234,9 @@ tests: - class: org.elasticsearch.search.ccs.CrossClusterIT method: testCancel issue: https://github.com/elastic/elasticsearch/issues/108061 +- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT + method: test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} + issue: https://github.com/elastic/elasticsearch/issues/117815 # Examples: # From 2b7adcd89dfb31411f68dda211f689d42b979af8 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:58:25 +0200 Subject: [PATCH 337/386] Add debug logging for doc parsing exceptions (#117768) --- .../index/mapper/DocumentMapper.java | 23 ++++++++++++++++--- .../index/mapper/MapperService.java | 9 +++++++- .../index/mapper/DocumentMapperTests.java | 3 ++- .../index/mapper/DocumentParserTests.java | 3 ++- 4 files changed, 32 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 10484a1c26098..1c9321737ab5f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -9,7 +9,9 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSortConfig; @@ -25,6 +27,7 @@ public class DocumentMapper { private final DocumentParser documentParser; private final MapperMetrics mapperMetrics; private final IndexVersion indexVersion; + private final Logger logger; static final NodeFeature INDEX_SORTING_ON_NESTED = new NodeFeature("mapper.index_sorting_on_nested"); @@ -44,7 +47,8 @@ public static DocumentMapper createEmpty(MapperService mapperService) { mapping, mapping.toCompressedXContent(), IndexVersion.current(), - mapperService.getMapperMetrics() + mapperService.getMapperMetrics(), + mapperService.index().getName() 
); } @@ -53,7 +57,8 @@ public static DocumentMapper createEmpty(MapperService mapperService) { Mapping mapping, CompressedXContent source, IndexVersion version, - MapperMetrics mapperMetrics + MapperMetrics mapperMetrics, + String indexName ) { this.documentParser = documentParser; this.type = mapping.getRoot().fullPath(); @@ -61,11 +66,18 @@ public static DocumentMapper createEmpty(MapperService mapperService) { this.mappingSource = source; this.mapperMetrics = mapperMetrics; this.indexVersion = version; + this.logger = Loggers.getLogger(getClass(), indexName); assert mapping.toCompressedXContent().equals(source) || isSyntheticSourceMalformed(source, version) : "provided source [" + source + "] differs from mapping [" + mapping.toCompressedXContent() + "]"; } + private void maybeLogDebug(Exception ex) { + if (logger.isDebugEnabled()) { + logger.debug("Error while parsing document: " + ex.getMessage(), ex); + } + } + /** * Indexes built at v.8.7 were missing an explicit entry for synthetic_source. * This got restored in v.8.10 to avoid confusion. The change is only restricted to mapping printout, it has no @@ -110,7 +122,12 @@ public MappingLookup mappers() { } public ParsedDocument parse(SourceToParse source) throws DocumentParsingException { - return documentParser.parseDocument(source, mappingLookup); + try { + return documentParser.parseDocument(source, mappingLookup); + } catch (Exception e) { + maybeLogDebug(e); + throw e; + } } public void validate(IndexSettings settings, boolean checkLimits) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 7f952153c6453..1673b1719d8bf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -585,7 +585,14 @@ private DocumentMapper doMerge(String type, MergeReason reason, Map Date: Mon, 2 Dec 2024 08:11:09 +0000 Subject: [PATCH 338/386] Revert "(+Doc) Link split-brain wiki (#108914)" This reverts commit 12aab083301958ddfbeec9ee09d333da8278fd2c. --- docs/reference/modules/discovery/voting.asciidoc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/reference/modules/discovery/voting.asciidoc b/docs/reference/modules/discovery/voting.asciidoc index 9e483d5883017..04cae9d02ab66 100644 --- a/docs/reference/modules/discovery/voting.asciidoc +++ b/docs/reference/modules/discovery/voting.asciidoc @@ -63,8 +63,7 @@ departed nodes from the voting configuration manually. Use the of resilience. No matter how it is configured, Elasticsearch will not suffer from a -"{wikipedia}/Split-brain_(computing)[split-brain]" inconsistency. -The `cluster.auto_shrink_voting_configuration` +"split-brain" inconsistency. The `cluster.auto_shrink_voting_configuration` setting affects only its availability in the event of the failure of some of its nodes and the administrative tasks that must be performed as nodes join and leave the cluster. From 9dcd9751f481952f5f08332b15aed31179af324d Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 2 Dec 2024 09:01:48 +0000 Subject: [PATCH 339/386] Add IMDSv2 support to `repository-s3` (#117748) The version of the AWS Java SDK we use already magically switches to IMDSv2 if available, but today we cannot claim to support IMDSv2 in Elasticsearch since we have no tests demonstrating that the magic really works for us. 
In particular, this sort of thing often risks falling foul of some restrictions imposed by the security manager (if not now then maybe in some future release). This commit adds proper support for IMDSv2 by enhancing the test suite to add the missing coverage to avoid any risk of breaking this magical SDK behaviour in future. Closes #105135 Closes ES-9984 --- docs/changelog/117748.yaml | 6 ++ .../snapshot-restore/repository-s3.asciidoc | 42 ++++++----- .../s3/RepositoryS3EcsCredentialsRestIT.java | 2 + .../RepositoryS3ImdsV1CredentialsRestIT.java | 2 + .../RepositoryS3ImdsV2CredentialsRestIT.java | 75 +++++++++++++++++++ .../fixture/aws/imds/Ec2ImdsHttpFixture.java | 10 ++- .../fixture/aws/imds/Ec2ImdsHttpHandler.java | 35 ++++++++- .../java/fixture/aws/imds/Ec2ImdsVersion.java | 26 +++++++ .../aws/imds/Ec2ImdsHttpHandlerTests.java | 67 +++++++++++++++-- 9 files changed, 236 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/117748.yaml create mode 100644 modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java create mode 100644 test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsVersion.java diff --git a/docs/changelog/117748.yaml b/docs/changelog/117748.yaml new file mode 100644 index 0000000000000..615adbae07ad7 --- /dev/null +++ b/docs/changelog/117748.yaml @@ -0,0 +1,6 @@ +pr: 117748 +summary: Add IMDSv2 support to `repository-s3` +area: Snapshot/Restore +type: enhancement +issues: + - 105135 diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 1b08a802a444f..9b71fe9220385 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -38,7 +38,8 @@ PUT _snapshot/my_s3_repository The client that you use to connect to S3 has a number of settings available. The settings have the form `s3.client.CLIENT_NAME.SETTING_NAME`. By default, `s3` repositories use a client named `default`, but this can be modified using -the <> `client`. For example: +the <> `client`. For example, to +use a client named `my-alternate-client`, register the repository as follows: [source,console] ---- @@ -69,10 +70,19 @@ bin/elasticsearch-keystore add s3.client.default.secret_key bin/elasticsearch-keystore add s3.client.default.session_token ---- -If instead you want to use the instance role or container role to access S3 -then you should leave these settings unset. You can switch from using specific -credentials back to the default of using the instance role or container role by -removing these settings from the keystore as follows: +If you do not configure these settings then {es} will attempt to automatically +obtain credentials from the environment in which it is running: + +* Nodes running on an instance in AWS EC2 will attempt to use the EC2 Instance + Metadata Service (IMDS) to obtain instance role credentials. {es} supports + both IMDS version 1 and IMDS version 2. + +* Nodes running in a container in AWS ECS and AWS EKS will attempt to obtain + container role credentials similarly. 
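+
+NOTE: The automatic IMDS lookup uses the SDK's standard metadata endpoint. In
+test environments the lookup can be redirected to a stub metadata service via
+the AWS SDK system property `com.amazonaws.sdk.ec2MetadataServiceEndpointOverride`
+(this is how the test suite for this feature works); it is not needed in normal
+deployments.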
+ +You can switch from using specific credentials back to the default of using the +instance role or container role by removing these settings from the keystore as +follows: [source,sh] ---- @@ -82,20 +92,14 @@ bin/elasticsearch-keystore remove s3.client.default.secret_key bin/elasticsearch-keystore remove s3.client.default.session_token ---- -*All* client secure settings of this repository type are -{ref}/secure-settings.html#reloadable-secure-settings[reloadable]. -You can define these settings before the node is started, -or call the <> -after the settings are defined to apply them to a running node. - -After you reload the settings, the internal `s3` clients, used to transfer the snapshot -contents, will utilize the latest settings from the keystore. Any existing `s3` -repositories, as well as any newly created ones, will pick up the new values -stored in the keystore. - -NOTE: In-progress snapshot/restore tasks will not be preempted by a *reload* of -the client's secure settings. The task will complete using the client as it was -built when the operation started. +Define the relevant secure settings in each node's keystore before starting the +node. The secure settings described here are all +{ref}/secure-settings.html#reloadable-secure-settings[reloadable] so you may +update the keystore contents on each node while the node is running and then +call the <> to apply the updated settings to the nodes in the cluster. After this API +completes, {es} will use the updated setting values for all future snapshot +operations, but ongoing operations may continue to use older setting values. The following list contains the available client settings. Those that must be stored in the keystore are marked as "secure" and are *reloadable*; the other diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java index 267ba6e6b3a13..a79ae4de7cc66 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsVersion; import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; @@ -36,6 +37,7 @@ public class RepositoryS3EcsCredentialsRestIT extends AbstractRepositoryS3RestTe private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + Ec2ImdsVersion.V1, dynamicS3Credentials::addValidCredentials, Set.of("/ecs_credentials_endpoint") ); diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java index de9c9b6ae0695..ead91981b3fa8 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.aws.imds.Ec2ImdsHttpFixture; 
+import fixture.aws.imds.Ec2ImdsVersion; import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; @@ -36,6 +37,7 @@ public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3Res private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + Ec2ImdsVersion.V1, dynamicS3Credentials::addValidCredentials, Set.of() ); diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java new file mode 100644 index 0000000000000..67adb096bd1ba --- /dev/null +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.s3; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsVersion; +import fixture.s3.DynamicS3Credentials; +import fixture.s3.S3HttpFixture; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Set; + +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 +public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3RestTestCase { + + private static final String PREFIX = getIdentifierPrefix("RepositoryS3ImdsV2CredentialsRestIT"); + private static final String BUCKET = PREFIX + "bucket"; + private static final String BASE_PATH = PREFIX + "base_path"; + private static final String CLIENT = "imdsv2_credentials_client"; + + private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); + + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + Ec2ImdsVersion.V2, + dynamicS3Credentials::addValidCredentials, + Set.of() + ); + + private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("repository-s3") + .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) + .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(s3Fixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String getBucketName() { + return BUCKET; + } + + @Override + protected String getBasePath() { + return BASE_PATH; + } + + @Override + protected String getClientName() { + return CLIENT; + } +} diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java index 13d36c6fc4812..c63c65a750d7c 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java @@ -24,16 +24,22 @@ public class Ec2ImdsHttpFixture extends ExternalResource { private HttpServer server; + private final Ec2ImdsVersion ec2ImdsVersion; private final BiConsumer newCredentialsConsumer; private final Set alternativeCredentialsEndpoints; - public Ec2ImdsHttpFixture(BiConsumer newCredentialsConsumer, Set alternativeCredentialsEndpoints) { + public Ec2ImdsHttpFixture( + Ec2ImdsVersion ec2ImdsVersion, + BiConsumer newCredentialsConsumer, + Set alternativeCredentialsEndpoints + ) { + this.ec2ImdsVersion = Objects.requireNonNull(ec2ImdsVersion); this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); this.alternativeCredentialsEndpoints = Objects.requireNonNull(alternativeCredentialsEndpoints); } protected HttpHandler createHandler() { - return new Ec2ImdsHttpHandler(newCredentialsConsumer, alternativeCredentialsEndpoints); + return new Ec2ImdsHttpHandler(ec2ImdsVersion, newCredentialsConsumer, alternativeCredentialsEndpoints); } public String getAddress() { diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java index bc87eff592bec..281465b96de05 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -38,10 +38,18 @@ public class Ec2ImdsHttpHandler implements HttpHandler { private static final String IMDS_SECURITY_CREDENTIALS_PATH = "/latest/meta-data/iam/security-credentials/"; + private final Ec2ImdsVersion ec2ImdsVersion; + private final Set validImdsTokens = ConcurrentCollections.newConcurrentSet(); + private final BiConsumer newCredentialsConsumer; private final Set validCredentialsEndpoints = ConcurrentCollections.newConcurrentSet(); - public Ec2ImdsHttpHandler(BiConsumer newCredentialsConsumer, Collection alternativeCredentialsEndpoints) { + public Ec2ImdsHttpHandler( + Ec2ImdsVersion ec2ImdsVersion, + BiConsumer newCredentialsConsumer, + Collection alternativeCredentialsEndpoints + ) { + this.ec2ImdsVersion = Objects.requireNonNull(ec2ImdsVersion); this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); this.validCredentialsEndpoints.addAll(alternativeCredentialsEndpoints); } @@ -55,11 +63,32 @@ public void handle(final HttpExchange exchange) throws IOException { final var requestMethod = exchange.getRequestMethod(); if ("PUT".equals(requestMethod) && 
"/latest/api/token".equals(path)) { - // Reject IMDSv2 probe - exchange.sendResponseHeaders(RestStatus.METHOD_NOT_ALLOWED.getStatus(), -1); + switch (ec2ImdsVersion) { + case V1 -> exchange.sendResponseHeaders(RestStatus.METHOD_NOT_ALLOWED.getStatus(), -1); + case V2 -> { + final var token = randomSecretKey(); + validImdsTokens.add(token); + final var responseBody = token.getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", "text/plain"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), responseBody.length); + exchange.getResponseBody().write(responseBody); + } + } return; } + if (ec2ImdsVersion == Ec2ImdsVersion.V2) { + final var token = exchange.getRequestHeaders().getFirst("X-aws-ec2-metadata-token"); + if (token == null) { + exchange.sendResponseHeaders(RestStatus.UNAUTHORIZED.getStatus(), -1); + return; + } + if (validImdsTokens.contains(token) == false) { + exchange.sendResponseHeaders(RestStatus.FORBIDDEN.getStatus(), -1); + return; + } + } + if ("GET".equals(requestMethod)) { if (path.equals(IMDS_SECURITY_CREDENTIALS_PATH)) { final var profileName = randomIdentifier(); diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsVersion.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsVersion.java new file mode 100644 index 0000000000000..7ed028c374cc7 --- /dev/null +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsVersion.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws.imds; + +/** + * Represents the IMDS protocol version simulated by the {@link Ec2ImdsHttpHandler}. + */ +public enum Ec2ImdsVersion { + /** + * Classic V1 behavior: plain {@code GET} requests, no tokens. + */ + V1, + + /** + * Newer V2 behavior: {@code GET} requests must include a {@code X-aws-ec2-metadata-token} header providing a token previously obtained + * by calling {@code PUT /latest/api/token}. 
+ */ + V2 +} diff --git a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java index 369b0ef449b2f..bb613395a0fba 100644 --- a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java +++ b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; @@ -29,6 +30,7 @@ import java.net.InetSocketAddress; import java.net.URI; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; @@ -36,17 +38,19 @@ public class Ec2ImdsHttpHandlerTests extends ESTestCase { + private static final String SECURITY_CREDENTIALS_URI = "/latest/meta-data/iam/security-credentials/"; + public void testImdsV1() throws IOException { final Map generatedCredentials = new HashMap<>(); - final var handler = new Ec2ImdsHttpHandler(generatedCredentials::put, Set.of()); + final var handler = new Ec2ImdsHttpHandler(Ec2ImdsVersion.V1, generatedCredentials::put, Set.of()); - final var roleResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/"); + final var roleResponse = handleRequest(handler, "GET", SECURITY_CREDENTIALS_URI); assertEquals(RestStatus.OK, roleResponse.status()); final var profileName = roleResponse.body().utf8ToString(); assertTrue(Strings.hasText(profileName)); - final var credentialsResponse = handleRequest(handler, "GET", "/latest/meta-data/iam/security-credentials/" + profileName); + final var credentialsResponse = handleRequest(handler, "GET", SECURITY_CREDENTIALS_URI + profileName); assertEquals(RestStatus.OK, credentialsResponse.status()); assertThat(generatedCredentials, aMapWithSize(1)); @@ -62,14 +66,67 @@ public void testImdsV1() throws IOException { public void testImdsV2Disabled() { assertEquals( RestStatus.METHOD_NOT_ALLOWED, - handleRequest(new Ec2ImdsHttpHandler((accessKey, sessionToken) -> fail(), Set.of()), "PUT", "/latest/api/token").status() + handleRequest( + new Ec2ImdsHttpHandler(Ec2ImdsVersion.V1, (accessKey, sessionToken) -> fail(), Set.of()), + "PUT", + "/latest/api/token" + ).status() ); } + public void testImdsV2() throws IOException { + final Map generatedCredentials = new HashMap<>(); + + final var handler = new Ec2ImdsHttpHandler(Ec2ImdsVersion.V2, generatedCredentials::put, Set.of()); + + final var tokenResponse = handleRequest(handler, "PUT", "/latest/api/token"); + assertEquals(RestStatus.OK, tokenResponse.status()); + final var token = tokenResponse.body().utf8ToString(); + + final var roleResponse = checkImdsV2GetRequest(handler, SECURITY_CREDENTIALS_URI, token); + assertEquals(RestStatus.OK, roleResponse.status()); + final var profileName = roleResponse.body().utf8ToString(); + assertTrue(Strings.hasText(profileName)); + + final var credentialsResponse = checkImdsV2GetRequest(handler, SECURITY_CREDENTIALS_URI + profileName, token); + assertEquals(RestStatus.OK, credentialsResponse.status()); + + assertThat(generatedCredentials, aMapWithSize(1)); + final var accessKey = generatedCredentials.keySet().iterator().next(); + final var sessionToken = 
generatedCredentials.values().iterator().next(); + + final var responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), credentialsResponse.body().streamInput(), false); + assertEquals(Set.of("AccessKeyId", "Expiration", "RoleArn", "SecretAccessKey", "Token"), responseMap.keySet()); + assertEquals(accessKey, responseMap.get("AccessKeyId")); + assertEquals(sessionToken, responseMap.get("Token")); + } + private record TestHttpResponse(RestStatus status, BytesReference body) {} + private static TestHttpResponse checkImdsV2GetRequest(Ec2ImdsHttpHandler handler, String uri, String token) { + final var unauthorizedResponse = handleRequest(handler, "GET", uri, null); + assertEquals(RestStatus.UNAUTHORIZED, unauthorizedResponse.status()); + + final var forbiddenResponse = handleRequest(handler, "GET", uri, randomValueOtherThan(token, ESTestCase::randomSecretKey)); + assertEquals(RestStatus.FORBIDDEN, forbiddenResponse.status()); + + return handleRequest(handler, "GET", uri, token); + } + private static TestHttpResponse handleRequest(Ec2ImdsHttpHandler handler, String method, String uri) { - final var httpExchange = new TestHttpExchange(method, uri, BytesArray.EMPTY, TestHttpExchange.EMPTY_HEADERS); + return handleRequest(handler, method, uri, null); + } + + private static TestHttpResponse handleRequest(Ec2ImdsHttpHandler handler, String method, String uri, @Nullable String token) { + final Headers headers; + if (token == null) { + headers = TestHttpExchange.EMPTY_HEADERS; + } else { + headers = new Headers(); + headers.put("X-aws-ec2-metadata-token", List.of(token)); + } + + final var httpExchange = new TestHttpExchange(method, uri, BytesArray.EMPTY, headers); try { handler.handle(httpExchange); } catch (IOException e) { From d2a4c70ca1f85e408efdc572ed4dda847733b0be Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Mon, 2 Dec 2024 11:16:38 +0200 Subject: [PATCH 340/386] Search Queries in parallel - part 3 (#117149) Update IT tests grouping assertResponses --- .../elasticsearch/aliases/IndexAliasesIT.java | 74 ++----- .../fetch/subphase/MatchedQueriesIT.java | 109 ++++------ .../highlight/HighlighterSearchIT.java | 193 +++++++----------- .../search/functionscore/QueryRescorerIT.java | 74 +++---- .../search/query/MultiMatchQueryIT.java | 109 +++------- 5 files changed, 175 insertions(+), 384 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index b70da34c8fe3f..309bf69f00be0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -65,6 +65,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; @@ -262,27 +263,16 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception { .setRefreshPolicy(RefreshPolicy.IMMEDIATE) ).actionGet(); - logger.info("--> checking single filtering alias search"); - assertResponse( + assertResponses( + searchResponse 
-> assertHits(searchResponse.getHits(), "1"), prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1") - ); - - logger.info("--> checking single filtering alias wildcard search"); - assertResponse( - prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1") + prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()) ); - assertResponse( + assertResponses( + searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3"), prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3") - ); - - logger.info("--> checking single filtering alias search with sort"); - assertResponse( - prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3") + prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC) ); logger.info("--> checking single filtering alias search with global facets"); @@ -323,28 +313,12 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception { searchResponse -> assertHits(searchResponse.getHits(), "1", "2") ); - logger.info("--> checking single non-filtering alias search"); - assertResponse( + assertResponses( + searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3", "4"), prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3", "4") - ); - - logger.info("--> checking non-filtering alias and filtering alias search"); - assertResponse( prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3", "4") - ); - - logger.info("--> checking index and filtering alias search"); - assertResponse( prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3", "4") - ); - - logger.info("--> checking index and alias wildcard search"); - assertResponse( - prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3", "4") + prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()) ); } @@ -506,11 +480,11 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "21", "31", "13", "33") ); - assertResponse( + assertResponses( + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)), prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) + prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()) ); - assertResponse( prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "21", "31", "11", "12", "13") @@ -519,16 +493,10 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), 
equalTo(5L)) ); - assertResponse( prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "11", "12", "13", "33") ); - assertResponse( - prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) - ); - assertResponse( prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "11", "12", "13", "21", "31", "33") @@ -537,7 +505,6 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); - assertResponse( prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "21", "22", "23", "31", "13", "33") @@ -546,7 +513,6 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); - assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()), searchResponse -> assertHits(searchResponse.getHits(), "11", "12", "13", "21", "22", "23", "31", "33") @@ -1325,17 +1291,13 @@ public void testIndexingAndQueryingHiddenAliases() throws Exception { searchResponse -> assertHits(searchResponse.getHits(), "2", "3") ); - // Ensure that all docs can be gotten through the alias - assertResponse( + assertResponses( + searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3"), + // Ensure that all docs can be gotten through the alias prepareSearch(alias).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3") - ); - - // And querying using a wildcard with indices options set to expand hidden - assertResponse( + // And querying using a wildcard with indices options set to expand hidden prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()) - .setIndicesOptions(IndicesOptions.fromOptions(false, false, true, false, true, true, true, false, false)), - searchResponse -> assertHits(searchResponse.getHits(), "1", "2", "3") + .setIndicesOptions(IndicesOptions.fromOptions(false, false, true, false, true, true, true, false, false)) ); // And that querying the alias with a wildcard and no expand options fails diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java index c796522eda0e8..b0faeeb295e33 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java @@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.wrapperQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import 
static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; @@ -105,54 +106,32 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { prepareIndex("test").setId("3").setSource("name", "test").get(); refresh(); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else if (hit.getId().equals("2") || hit.getId().equals("3")) { + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } + }, prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) ), - response -> { - assertHitCount(response, 3L); - for (SearchHit hit : response.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else if (hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); - } - } - } - ); - - assertResponse( prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) - ), - response -> { - assertHitCount(response, 3L); - for (SearchHit hit : response.getHits()) { - if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else if (hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); - } - } - } + ) ); } @@ -165,43 +144,25 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex prepareIndex("test").setId("3").setSource("name", "test", "title", "title3").get(); refresh(); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 3L); + for (SearchHit hit : response.getHits()) { + if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { 
+ assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); + } else { + fail("Unexpected document returned with id " + hit.getId()); + } + } + }, prepareSearch().setQuery( boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", "title3").queryName("title")) ).setPostFilter(termQuery("name", "test").queryName("name")), - response -> { - assertHitCount(response, 3L); - for (SearchHit hit : response.getHits()) { - if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); - } - } - } - ); - - assertResponse( prepareSearch().setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title")) - .setPostFilter(matchQuery("name", "test").queryName("name")), - response -> { - assertHitCount(response, 3L); - for (SearchHit hit : response.getHits()) { - if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); - assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); - assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); - assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); - } else { - fail("Unexpected document returned with id " + hit.getId()); - } - } - } + .setPostFilter(matchQuery("name", "test").queryName("name")) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0805d0f366b0f..36580ebda8aee 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -97,6 +97,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNotHighlighted; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -596,40 +597,24 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except } indexRandom(true, indexRequestBuilders); - assertResponse( + assertResponses(response -> { + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight( + response, + i, + "title", + 0, + equalTo("This is a test on the highlighting bug present in elasticsearch. 
Hopefully it works.") + ); + assertHighlight(response, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); + } + }, prepareSearch().setQuery(matchQuery("title", "bug")) // asking for the whole field to be highlighted .highlighter(new HighlightBuilder().field("title", -1, 0)), - response -> { - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight( - response, - i, - "title", - 0, - equalTo("This is a test on the highlighting bug present in elasticsearch. Hopefully it works.") - ); - assertHighlight(response, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); - } - } - ); - - assertResponse( prepareSearch().setQuery(matchQuery("title", "bug")) // sentences will be generated out of each value - .highlighter(new HighlightBuilder().field("title")), - response -> { - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight( - response, - i, - "title", - 0, - equalTo("This is a test on the highlighting bug present in elasticsearch. Hopefully it works.") - ); - assertHighlight(response, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); - } - } + .highlighter(new HighlightBuilder().field("title")) ); assertResponse( @@ -792,27 +777,31 @@ public void testPlainHighlighterOrder() throws Exception { refresh(); { - // fragments should be in order of appearance by default - SearchSourceBuilder source = searchSource().query(matchQuery("field1", "brown dog")) - .highlighter(highlight().highlighterType("plain").field("field1").preTags("").postTags("").fragmentSize(25)); - - assertResponse(prepareSearch("test").setSource(source), response -> { - - assertHighlight(response, 0, "field1", 0, 3, equalTo("The quick brown fox")); - assertHighlight(response, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); - assertHighlight(response, 0, "field1", 2, 3, equalTo(" dog doesn't care")); - }); - // lets be explicit about the order - source = searchSource().query(matchQuery("field1", "brown dog")) - .highlighter( - highlight().highlighterType("plain").field("field1").order("none").preTags("").postTags("").fragmentSize(25) - ); - - assertResponse(prepareSearch("test").setSource(source), response -> { + assertResponses(response -> { assertHighlight(response, 0, "field1", 0, 3, equalTo("The quick brown fox")); assertHighlight(response, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); assertHighlight(response, 0, "field1", 2, 3, equalTo(" dog doesn't care")); - }); + }, + // fragments should be in order of appearance by default + prepareSearch("test").setSource( + searchSource().query(matchQuery("field1", "brown dog")) + .highlighter( + highlight().highlighterType("plain").field("field1").preTags("").postTags("").fragmentSize(25) + ) + ), + // lets be explicit about the order + prepareSearch("test").setSource( + searchSource().query(matchQuery("field1", "brown dog")) + .highlighter( + highlight().highlighterType("plain") + .field("field1") + .order("none") + .preTags("") + .postTags("") + .fragmentSize(25) + ) + ) + ); } { // order by score @@ -1701,42 +1690,26 @@ public void testDisableFastVectorHighlighter() throws Exception { } ); - // Using plain highlighter instead of FVH - assertResponse( + assertResponses(response -> { + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight( + response, + i, + "title", + 0, + 1, + equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724") + ); + } + }, + // Using plain highlighter 
instead of FVH prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("plain")), - response -> { - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight( - response, - i, - "title", - 0, - 1, - equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724") - ); - } - } - ); - - // Using plain highlighter instead of FVH on the field level - assertResponse( + // Using plain highlighter instead of FVH on the field level prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("plain")).highlighterType("plain") - ), - response -> { - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight( - response, - i, - "title", - 0, - 1, - equalTo("This is a test for the workaround for the fast vector highlighting SOLR-3724") - ); - } - } + ) ); } @@ -1826,44 +1799,29 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { .get(); refresh(); - assertResponse( + assertResponses(response -> { + assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); + assertHighlight( + response, + 0, + "tags", + 1, + 2, + equalTo("here is another one that is very long tag and has the tag token near the end") + ); + }, prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field( new HighlightBuilder.Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple") ) ), - response -> { - assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight( - response, - 0, - "tags", - 1, - 2, - equalTo("here is another one that is very long tag and has the tag token near the end") - ); - } - ); - - assertResponse( prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field( new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("span") ) - ), - response -> { - assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); - assertHighlight( - response, - 0, - "tags", - 1, - 2, - equalTo("here is another one that is very long tag and has the tag token near the end") - ); - } + ) ); assertFailures( @@ -3627,15 +3585,16 @@ public void testWithNestedQuery() throws Exception { assertThat(field.fragments()[1].string(), equalTo("cow")); } ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); + assertThat(field.fragments().length, equalTo(1)); + assertThat(field.fragments()[0].string(), equalTo("brown shoes")); + }, prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), - response -> { - assertHitCount(response, 1); - HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); - } + prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) + .highlighter(new 
HighlightBuilder().field(new Field("foo.text").highlighterType(type))) ); assertResponse( prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None)) @@ -3647,16 +3606,6 @@ public void testWithNestedQuery() throws Exception { assertThat(field.fragments()[0].string(), equalTo("brown shoes")); } ); - assertResponse( - prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))), - response -> { - assertHitCount(response, 1); - HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo.text"); - assertThat(field.fragments().length, equalTo(1)); - assertThat(field.fragments()[0].string(), equalTo("brown shoes")); - } - ); } // For unified and fvh highlighters we just check that the nested query is correctly extracted diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 9fed4ead8c248..a7efb2fe0e68b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -69,6 +69,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -149,33 +150,24 @@ public void testRescorePhrase() throws Exception { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); + assertHitCount(response, 3); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); + assertFirstHit(response, hasId("1")); + assertSecondHit(response, hasId("3")); + assertThirdHit(response, hasId("2")); } ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); + assertFirstHit(response, hasId("1")); + assertSecondHit(response, hasId("2")); + assertThirdHit(response, hasId("3")); + }, prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(3)), 5), - response -> { - assertHitCount(response, 3); - assertFirstHit(response, hasId("1")); - assertSecondHit(response, hasId("2")); - assertThirdHit(response, hasId("3")); - } - ); - assertResponse( prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) - .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown")), 5), - response -> { - 
assertHitCount(response, 3); - assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); - assertFirstHit(response, hasId("1")); - assertSecondHit(response, hasId("2")); - assertThirdHit(response, hasId("3")); - } + .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown")), 5) ); } @@ -212,7 +204,15 @@ public void testMoreDocs() throws Exception { prepareIndex("test").setId("11").setSource("field1", "2st street boston massachusetts").get(); prepareIndex("test").setId("12").setSource("field1", "3st street boston massachusetts").get(); indicesAdmin().prepareRefresh("test").get(); - assertResponse( + + assertResponses(response -> { + assertThat(response.getHits().getHits().length, equalTo(5)); + assertHitCount(response, 9); + assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); + assertFirstHit(response, hasId("2")); + assertSecondHit(response, hasId("6")); + assertThirdHit(response, hasId("3")); + }, prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) @@ -221,16 +221,6 @@ public void testMoreDocs() throws Exception { .setRescoreQueryWeight(2.0f), 20 ), - response -> { - assertThat(response.getHits().getHits().length, equalTo(5)); - assertHitCount(response, 9); - assertFirstHit(response, hasId("2")); - assertSecondHit(response, hasId("6")); - assertThirdHit(response, hasId("3")); - } - ); - - assertResponse( prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) @@ -239,15 +229,7 @@ public void testMoreDocs() throws Exception { new QueryRescorerBuilder(matchPhraseQuery("field1", "lexington avenue massachusetts").slop(3)).setQueryWeight(0.6f) .setRescoreQueryWeight(2.0f), 20 - ), - response -> { - assertThat(response.getHits().getHits().length, equalTo(5)); - assertHitCount(response, 9); - assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); - assertFirstHit(response, hasId("2")); - assertSecondHit(response, hasId("6")); - assertThirdHit(response, hasId("3")); - } + ) ); // Make sure non-zero from works: assertResponse( @@ -465,7 +447,8 @@ public void testEquivalence() throws Exception { .setFrom(0) .setSize(resultSize), plain -> { - assertResponse( + assertResponses( + rescored -> assertEquivalent(query, plain, rescored), prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) @@ -478,10 +461,6 @@ public void testEquivalence() throws Exception { .setRescoreQueryWeight(0.0f), rescoreWindow ), - rescored -> assertEquivalent(query, plain, rescored) - ); // check equivalence - - assertResponse( prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) @@ -492,8 +471,7 @@ public void testEquivalence() throws Exception { .setQueryWeight(1.0f) .setRescoreQueryWeight(1.0f), rescoreWindow - ), - rescored -> assertEquivalent(query, plain, rescored) + ) ); // check equivalence } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 
3f6f7af56eb08..69a9fd7fdd4c7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -302,27 +302,20 @@ public void testDefaults() throws ExecutionException, InterruptedException { ), response -> assertFirstHit(response, hasId("theother")) ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("theone")); + }, prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) ) ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } - ); - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) ) - ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } + ) ); } @@ -630,7 +623,10 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException response -> assertFirstHit(response, hasId("theother")) ); - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("theone")); + }, prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").type( @@ -638,12 +634,6 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException ).operator(Operator.AND) ) ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } - ); - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill").type( @@ -651,12 +641,6 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException ).analyzer("category").lenient(true).operator(Operator.AND) ) ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } - ); - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field").type( @@ -664,25 +648,17 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException ).analyzer("category").lenient(true).operator(Operator.AND) ) ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } - ); - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field").type( MultiMatchQueryBuilder.Type.CROSS_FIELDS ).analyzer("category").lenient(true).operator(Operator.AND) ) - ), - response -> { - assertHitCount(response, 1L); - assertFirstHit(response, hasId("theone")); - } + ) ); - assertResponse( + + assertResponses( + response -> assertFirstHit(response, hasId("theone")), prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america 15", "first_name", "last_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) @@ -690,71 +666,42 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException .analyzer("category") ) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( 
prepareSearch("test").setQuery( randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType(multiMatchQuery("25 15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("25 15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category") ) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("25 15", "first_name", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") ) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("25 15", "int-field", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") ) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("25 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") ) ), - response -> assertFirstHit(response, hasId("theone")) - ); - - assertResponse( prepareSearch("test").setQuery( randomizeType( multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( MultiMatchQueryBuilder.Type.CROSS_FIELDS ).analyzer("category").operator(Operator.OR) ) - ), - response -> assertFirstHit(response, hasId("theone")) + ) ); // test group based on analyzer -- all fields are grouped into a cross field search @@ -771,6 +718,7 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException assertFirstHit(response, hasId("theone")); } ); + // counter example assertHitCount( 0L, @@ -840,33 +788,26 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) ) ); - assertResponse( + assertResponses(response -> { + /* + * Doesn't find the one because "alpha 15" isn't a number and we don't + * break on spaces. + */ + assertHitCount(response, 1L); + assertFirstHit(response, hasId("ultimate1")); + }, prepareSearch("test").setQuery( randomizeType( multiMatchQuery("alpha 15", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true) ) ), - response -> { - /* - * Doesn't find the one because "alpha 15" isn't a number and we don't - * break on spaces. 
-                 */
-                assertHitCount(response, 1L);
-                assertFirstHit(response, hasId("ultimate1"));
-            }
-        );
-        // Lenient wasn't always properly lenient with two numeric fields
-        assertResponse(
+            // Lenient wasn't always properly lenient with two numeric fields
             prepareSearch("test").setQuery(
                 randomizeType(
                     multiMatchQuery("alpha 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
                         .lenient(true)
                 )
-            ),
-            response -> {
-                assertHitCount(response, 1L);
-                assertFirstHit(response, hasId("ultimate1"));
-            }
+            )
         );
         // Check that cross fields works with date fields
         assertResponse(

From 2a30fbc1e8284b8a23f285983be1a91f362c48a7 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Mon, 2 Dec 2024 11:58:16 +0100
Subject: [PATCH 341/386] Remove bucketOrd from InternalGeoGridBucket (#117615)

This commit removes the bucketOrd field from InternalGeoGridBucket; it was
only used to build the InternalAggregation from the aggregator.

---
 .../bucket/BucketsAggregator.java             | 39 +++++++----
 .../bucket/geogrid/BucketPriorityQueue.java   | 17 +++--
 .../bucket/geogrid/GeoGridAggregator.java     | 66 ++++++++++++-------
 .../bucket/geogrid/InternalGeoGrid.java       |  9 ++-
 .../bucket/geogrid/InternalGeoGridBucket.java |  2 -
 .../bucket/terms/BucketAndOrd.java            | 21 ++++++
 6 files changed, 112 insertions(+), 42 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketAndOrd.java

diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
index 665dd49e3381d..e86c7127ec2f4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
@@ -160,7 +160,8 @@ protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {}
      * the provided ordinals.
      *

    * Most aggregations should probably use something like - * {@link #buildSubAggsForAllBuckets(ObjectArray, ToLongFunction, BiConsumer)} + * {@link #buildSubAggsForAllBuckets(ObjectArray, LongArray, BiConsumer)} + * or {@link #buildSubAggsForAllBuckets(ObjectArray, ToLongFunction, BiConsumer)} * or {@link #buildAggregationsForVariableBuckets(LongArray, LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)} * or {@link #buildAggregationsForFixedBucketCount(LongArray, int, BucketBuilderForFixedCount, Function)} * or {@link #buildAggregationsForSingleBucket(LongArray, SingleBucketResultBuilder)} @@ -193,10 +194,9 @@ public int size() { } /** - * Build the sub aggregation results for a list of buckets and set them on - * the buckets. This is usually used by aggregations that are selective - * in which bucket they build. They use some mechanism of selecting a list - * of buckets to build use this method to "finish" building the results. + * Similarly to {@link #buildSubAggsForAllBuckets(ObjectArray, LongArray, BiConsumer)} + * but it needs to build the bucket ordinals. This method usually requires for buckets + * to contain the bucket ordinal. * @param buckets the buckets to finish building * @param bucketToOrd how to convert a bucket into an ordinal * @param setAggs how to set the sub-aggregation results on a bucket @@ -218,12 +218,29 @@ protected final void buildSubAggsForAllBuckets( bucketOrdsToCollect.set(s++, bucketToOrd.applyAsLong(bucket)); } } - var results = buildSubAggsForBuckets(bucketOrdsToCollect); - s = 0; - for (long ord = 0; ord < buckets.size(); ord++) { - for (B value : buckets.get(ord)) { - setAggs.accept(value, results.apply(s++)); - } + buildSubAggsForAllBuckets(buckets, bucketOrdsToCollect, setAggs); + } + } + + /** + * Build the sub aggregation results for a list of buckets and set them on + * the buckets. This is usually used by aggregations that are selective + * in which bucket they build. They use some mechanism of selecting a list + * of buckets to build use this method to "finish" building the results. 
+ * @param buckets the buckets to finish building + * @param bucketOrdsToCollect bucket ordinals + * @param setAggs how to set the sub-aggregation results on a bucket + */ + protected final void buildSubAggsForAllBuckets( + ObjectArray buckets, + LongArray bucketOrdsToCollect, + BiConsumer setAggs + ) throws IOException { + var results = buildSubAggsForBuckets(bucketOrdsToCollect); + int s = 0; + for (long ord = 0; ord < buckets.size(); ord++) { + for (B value : buckets.get(ord)) { + setAggs.accept(value, results.apply(s++)); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java index cc677605c4528..85c79df42a714 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/BucketPriorityQueue.java @@ -11,17 +11,24 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; -class BucketPriorityQueue extends ObjectArrayPriorityQueue { +import java.util.function.Function; - BucketPriorityQueue(int size, BigArrays bigArrays) { +class BucketPriorityQueue extends ObjectArrayPriorityQueue { + + private final Function bucketSupplier; + + BucketPriorityQueue(int size, BigArrays bigArrays, Function bucketSupplier) { super(size, bigArrays); + this.bucketSupplier = bucketSupplier; } @Override - protected boolean lessThan(InternalGeoGridBucket o1, InternalGeoGridBucket o2) { - int cmp = Long.compare(o2.getDocCount(), o1.getDocCount()); + protected boolean lessThan(A o1, A o2) { + final B b1 = bucketSupplier.apply(o1); + final B b2 = bucketSupplier.apply(o2); + int cmp = Long.compare(b2.getDocCount(), b1.getDocCount()); if (cmp == 0) { - cmp = o2.compareTo(o1); + cmp = b2.compareTo(b1); if (cmp == 0) { cmp = System.identityHashCode(o2) - System.identityHashCode(o1); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index 1d3614af08768..b84dff6e73e0b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -12,6 +12,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; @@ -23,6 +24,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; +import org.elasticsearch.search.aggregations.bucket.terms.BucketAndOrd; import org.elasticsearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -135,34 +137,52 @@ public void collect(int doc, long owningBucketOrd) throws IOException { @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws 
IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { - for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { - int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize); - - try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, bigArrays())) { - InternalGeoGridBucket spare = null; - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); - while (ordsEnum.next()) { - if (spare == null) { - checkRealMemoryCBForInternalBucket(); - spare = newEmptyBucket(); + try (IntArray bucketsSizePerOrd = bigArrays().newIntArray(owningBucketOrds.size())) { + long ordsToCollect = 0; + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize); + ordsToCollect += size; + bucketsSizePerOrd.set(ordIdx, size); + } + try (LongArray ordsArray = bigArrays().newLongArray(ordsToCollect)) { + long ordsCollected = 0; + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + try ( + BucketPriorityQueue, InternalGeoGridBucket> ordered = + new BucketPriorityQueue<>(bucketsSizePerOrd.get(ordIdx), bigArrays(), b -> b.bucket) + ) { + BucketAndOrd spare = null; + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new BucketAndOrd<>(newEmptyBucket()); + } + + // need a special function to keep the source bucket + // up-to-date so it can get the appropriate key + spare.bucket.hashAsLong = ordsEnum.value(); + spare.bucket.docCount = bucketDocCount(ordsEnum.ord()); + spare.ord = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + final int orderedSize = (int) ordered.size(); + final InternalGeoGridBucket[] buckets = new InternalGeoGridBucket[orderedSize]; + for (int i = orderedSize - 1; i >= 0; --i) { + BucketAndOrd bucketBucketAndOrd = ordered.pop(); + buckets[i] = bucketBucketAndOrd.bucket; + ordsArray.set(ordsCollected + i, bucketBucketAndOrd.ord); + } + topBucketsPerOrd.set(ordIdx, buckets); + ordsCollected += orderedSize; } - - // need a special function to keep the source bucket - // up-to-date so it can get the appropriate key - spare.hashAsLong = ordsEnum.value(); - spare.docCount = bucketDocCount(ordsEnum.ord()); - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } - - topBucketsPerOrd.set(ordIdx, new InternalGeoGridBucket[(int) ordered.size()]); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); } + assert ordsCollected == ordsArray.size(); + buildSubAggsForAllBuckets(topBucketsPerOrd, ordsArray, (b, aggs) -> b.aggregations = aggs); } } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); return buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index 6a32b41034503..343c92b353884 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java
@@ -27,6 +27,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.function.Function;
 
 import static java.util.Collections.unmodifiableList;
 
@@ -106,7 +107,13 @@ public InternalAggregation get() {
             final int size = Math.toIntExact(
                 context.isFinalReduce() == false ? bucketsReducer.size() : Math.min(requiredSize, bucketsReducer.size())
             );
-            try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, context.bigArrays())) {
+            try (
+                BucketPriorityQueue ordered = new BucketPriorityQueue<>(
+                    size,
+                    context.bigArrays(),
+                    Function.identity()
+                )
+            ) {
                 bucketsReducer.forEach(entry -> {
                     InternalGeoGridBucket bucket = createBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations());
                     ordered.insertWithOverflow(bucket);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
index 60de4c3974c92..8884a412bcf41 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java
@@ -28,8 +28,6 @@ public abstract class InternalGeoGridBucket extends InternalMultiBucketAggregati
     protected long docCount;
     protected InternalAggregations aggregations;
 
-    long bucketOrd;
-
     public InternalGeoGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) {
         this.docCount = docCount;
         this.aggregations = aggregations;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketAndOrd.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketAndOrd.java
new file mode 100644
index 0000000000000..7b853860b7959
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BucketAndOrd.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.search.aggregations.bucket.terms;
+
+/** Represents a bucket and its bucket ordinal */
+public final class BucketAndOrd {
+
+    public final B bucket; // the bucket
+    public long ord; // mutable ordinal of the bucket
+
+    public BucketAndOrd(B bucket) {
+        this.bucket = bucket;
+    }
+}

From 79ce6e38728a7710f01f18d9769cd6941c2312f6 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Mon, 2 Dec 2024 11:59:34 +0100
Subject: [PATCH 342/386] Improve performance of H3.h3ToGeoBoundary (#117812)

There are two clear code paths, depending on whether an H3 bin belongs to
an even resolution (class II) or an odd resolution (class III). Specializing
the code paths for each type leads to an improvement in performance.
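In outline, each boundary routine now performs the class check once, up front,
and dispatches to a dedicated implementation, instead of threading class III
handling through a single generic loop. A condensed view of the resulting
dispatch shape, with the helper bodies omitted (see the full diff below for
the real implementations):

    public CellBoundary faceIjkToCellBoundary(final int res) {
        // Move the center point into the aperture 33r substrate grid, as before.
        this.coord.downAp3();
        this.coord.downAp3r();
        // Class III resolutions add a cw aperture 7 step to reach Class II.
        final int adjRes = adjustRes(this.coord, res);
        if (H3Index.isResolutionClassIII(res)) {
            // Odd resolutions: edges may cross icosahedron faces, so this path
            // keeps the intersection-vertex bookkeeping.
            return faceIjkToCellBoundaryClassIII(adjRes);
        } else {
            // Even resolutions: exactly the six cell vertices, no intersections.
            return faceIjkToCellBoundaryClassII(adjRes);
        }
    }

The pentagon variant, faceIjkPentToCellBoundary, follows the same pattern with
its own Class II and Class III specializations.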
--- .../java/org/elasticsearch/h3/FaceIJK.java | 241 ++++++++++-------- .../main/java/org/elasticsearch/h3/H3.java | 6 +- 2 files changed, 142 insertions(+), 105 deletions(-) diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java b/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java index 866fdfe8a7f8b..a5744ed5eb6bc 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/FaceIJK.java @@ -417,43 +417,64 @@ public LatLng faceIjkToGeo(int res) { * for this FaceIJK address at a specified resolution. * * @param res The H3 resolution of the cell. - * @param start The first topological vertex to return. - * @param length The number of topological vertexes to return. */ - public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { + public CellBoundary faceIjkPentToCellBoundary(int res) { // adjust the center point to be in an aperture 33r substrate grid // these should be composed for speed this.coord.downAp3(); this.coord.downAp3r(); // if res is Class III we need to add a cw aperture 7 to get to // icosahedral Class II - int adjRes = res; - if (H3Index.isResolutionClassIII(res)) { - this.coord.downAp7r(); - adjRes += 1; - } + final int adjRes = adjustRes(this.coord, res); + // If we're returning the entire loop, we need one more iteration in case // of a distortion vertex on the last edge - final int additionalIteration = length == Constants.NUM_PENT_VERTS ? 1 : 0; - final boolean isResolutionClassIII = H3Index.isResolutionClassIII(res); - // convert each vertex to lat/lng - // adjust the face of each vertex as appropriate and introduce - // edge-crossing vertices as needed + if (H3Index.isResolutionClassIII(res)) { + return faceIjkPentToCellBoundaryClassIII(adjRes); + } else { + return faceIjkPentToCellBoundaryClassII(adjRes); + } + } + + private CellBoundary faceIjkPentToCellBoundaryClassII(int adjRes) { + final LatLng[] points = new LatLng[Constants.NUM_PENT_VERTS]; + final FaceIJK fijk = new FaceIJK(this.face, new CoordIJK(0, 0, 0)); + for (int vert = 0; vert < Constants.NUM_PENT_VERTS; vert++) { + // The center point is now in the same substrate grid as the origin + // cell vertices. Add the center point substate coordinates + // to each vertex to translate the vertices to that cell. + fijk.coord.reset( + VERTEX_CLASSII[vert][0] + this.coord.i, + VERTEX_CLASSII[vert][1] + this.coord.j, + VERTEX_CLASSII[vert][2] + this.coord.k + ); + fijk.coord.ijkNormalize(); + fijk.face = this.face; + + fijk.adjustPentVertOverage(adjRes); + + points[vert] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); + } + return new CellBoundary(points, Constants.NUM_PENT_VERTS); + } + + private CellBoundary faceIjkPentToCellBoundaryClassIII(int adjRes) { final LatLng[] points = new LatLng[CellBoundary.MAX_CELL_BNDRY_VERTS]; int numPoints = 0; - final CoordIJK scratch = new CoordIJK(0, 0, 0); - final FaceIJK fijk = new FaceIJK(this.face, scratch); - final int[][] coord = isResolutionClassIII ? VERTEX_CLASSIII : VERTEX_CLASSII; + final FaceIJK fijk = new FaceIJK(this.face, new CoordIJK(0, 0, 0)); final CoordIJK lastCoord = new CoordIJK(0, 0, 0); int lastFace = this.face; - for (int vert = start; vert < start + length + additionalIteration; vert++) { + for (int vert = 0; vert < Constants.NUM_PENT_VERTS + 1; vert++) { final int v = vert % Constants.NUM_PENT_VERTS; // The center point is now in the same substrate grid as the origin // cell vertices. 
Add the center point substate coordinates // to each vertex to translate the vertices to that cell. - scratch.reset(coord[v][0], coord[v][1], coord[v][2]); - scratch.ijkAdd(this.coord.i, this.coord.j, this.coord.k); - scratch.ijkNormalize(); + fijk.coord.reset( + VERTEX_CLASSIII[v][0] + this.coord.i, + VERTEX_CLASSIII[v][1] + this.coord.j, + VERTEX_CLASSIII[v][2] + this.coord.k + ); + fijk.coord.ijkNormalize(); fijk.face = this.face; fijk.adjustPentVertOverage(adjRes); @@ -461,7 +482,7 @@ public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { // all Class III pentagon edges cross icosa edges // note that Class II pentagons have vertices on the edge, // not edge intersections - if (isResolutionClassIII && vert > start) { + if (vert > 0) { // find hex2d of the two vertexes on the last face final Vec2d orig2d0 = lastCoord.ijkToHex2d(); @@ -480,35 +501,17 @@ public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { final Vec2d orig2d1 = lastCoord.ijkToHex2d(); - // find the appropriate icosa face edge vertexes - final Vec2d edge0; - final Vec2d edge1; - switch (adjacentFaceDir[fijkOrient.face][fijk.face]) { - case IJ -> { - edge0 = maxDimByCIIVec2d[adjRes][0]; - edge1 = maxDimByCIIVec2d[adjRes][1]; - } - case JK -> { - edge0 = maxDimByCIIVec2d[adjRes][1]; - edge1 = maxDimByCIIVec2d[adjRes][2]; - } - // case KI: - default -> { - assert (adjacentFaceDir[fijkOrient.face][fijk.face] == KI); - edge0 = maxDimByCIIVec2d[adjRes][2]; - edge1 = maxDimByCIIVec2d[adjRes][0]; - } - } - // find the intersection and add the lat/lng point to the result - final Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); - points[numPoints++] = inter.hex2dToGeo(fijkOrient.face, adjRes, true); + final Vec2d inter = findIntersectionPoint(orig2d0, orig2d1, adjRes, adjacentFaceDir[fijkOrient.face][fijk.face]); + if (inter != null) { + points[numPoints++] = inter.hex2dToGeo(fijkOrient.face, adjRes, true); + } } // convert vertex to lat/lng and add to the result // vert == start + NUM_PENT_VERTS is only used to test for possible // intersection on last edge - if (vert < start + Constants.NUM_PENT_VERTS) { + if (vert < Constants.NUM_PENT_VERTS) { points[numPoints++] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); } lastFace = fijk.face; @@ -522,10 +525,8 @@ public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { * FaceIJK address at a specified resolution. * * @param res The H3 resolution of the cell. - * @param start The first topological vertex to return. - * @param length The number of topological vertexes to return. */ - public CellBoundary faceIjkToCellBoundary(final int res, final int start, final int length) { + public CellBoundary faceIjkToCellBoundary(final int res) { // adjust the center point to be in an aperture 33r substrate grid // these should be composed for speed this.coord.downAp3(); @@ -533,32 +534,63 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final // if res is Class III we need to add a cw aperture 7 to get to // icosahedral Class II - int adjRes = res; - if (H3Index.isResolutionClassIII(res)) { - this.coord.downAp7r(); - adjRes += 1; - } + final int adjRes = adjustRes(this.coord, res); - // If we're returning the entire loop, we need one more iteration in case - // of a distortion vertex on the last edge - final int additionalIteration = length == Constants.NUM_HEX_VERTS ? 
1 : 0; - final boolean isResolutionClassIII = H3Index.isResolutionClassIII(res); // convert each vertex to lat/lng // adjust the face of each vertex as appropriate and introduce // edge-crossing vertices as needed + if (H3Index.isResolutionClassIII(res)) { + return faceIjkToCellBoundaryClassIII(adjRes); + } else { + return faceIjkToCellBoundaryClassII(adjRes); + } + } + + private static int adjustRes(CoordIJK coord, int res) { + if (H3Index.isResolutionClassIII(res)) { + coord.downAp7r(); + res += 1; + } + return res; + } + + private CellBoundary faceIjkToCellBoundaryClassII(int adjRes) { + final LatLng[] points = new LatLng[Constants.NUM_HEX_VERTS]; + final FaceIJK fijk = new FaceIJK(this.face, new CoordIJK(0, 0, 0)); + for (int vert = 0; vert < Constants.NUM_HEX_VERTS; vert++) { + fijk.coord.reset( + VERTEX_CLASSII[vert][0] + this.coord.i, + VERTEX_CLASSII[vert][1] + this.coord.j, + VERTEX_CLASSII[vert][2] + this.coord.k + ); + fijk.coord.ijkNormalize(); + fijk.face = this.face; + + fijk.adjustOverageClassII(adjRes, false, true); + + // convert vertex to lat/lng and add to the result + // vert == start + NUM_HEX_VERTS is only used to test for possible + // intersection on last edge + points[vert] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); + } + return new CellBoundary(points, Constants.NUM_HEX_VERTS); + } + + private CellBoundary faceIjkToCellBoundaryClassIII(int adjRes) { final LatLng[] points = new LatLng[CellBoundary.MAX_CELL_BNDRY_VERTS]; int numPoints = 0; - final CoordIJK scratch1 = new CoordIJK(0, 0, 0); - final FaceIJK fijk = new FaceIJK(this.face, scratch1); - final CoordIJK scratch2 = isResolutionClassIII ? new CoordIJK(0, 0, 0) : null; - final int[][] verts = isResolutionClassIII ? VERTEX_CLASSIII : VERTEX_CLASSII; + final FaceIJK fijk = new FaceIJK(this.face, new CoordIJK(0, 0, 0)); + final CoordIJK scratch = new CoordIJK(0, 0, 0); int lastFace = -1; Overage lastOverage = Overage.NO_OVERAGE; - for (int vert = start; vert < start + length + additionalIteration; vert++) { - int v = vert % Constants.NUM_HEX_VERTS; - scratch1.reset(verts[v][0], verts[v][1], verts[v][2]); - scratch1.ijkAdd(this.coord.i, this.coord.j, this.coord.k); - scratch1.ijkNormalize(); + for (int vert = 0; vert < Constants.NUM_HEX_VERTS + 1; vert++) { + final int v = vert % Constants.NUM_HEX_VERTS; + fijk.coord.reset( + VERTEX_CLASSIII[v][0] + this.coord.i, + VERTEX_CLASSIII[v][1] + this.coord.j, + VERTEX_CLASSIII[v][2] + this.coord.k + ); + fijk.coord.ijkNormalize(); fijk.face = this.face; final Overage overage = fijk.adjustOverageClassII(adjRes, false, true); @@ -572,50 +604,20 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final projection. Note that Class II cell edges have vertices on the face edge, with no edge line intersections. */ - if (isResolutionClassIII && vert > start && fijk.face != lastFace && lastOverage != Overage.FACE_EDGE) { + if (vert > 0 && fijk.face != lastFace && lastOverage != Overage.FACE_EDGE) { // find hex2d of the two vertexes on original face final int lastV = (v + 5) % Constants.NUM_HEX_VERTS; // The center point is now in the same substrate grid as the origin // cell vertices. Add the center point substate coordinates // to each vertex to translate the vertices to that cell. 
- final int[] vertexLast = verts[lastV]; - final int[] vertexV = verts[v]; - scratch2.reset(vertexLast[0] + this.coord.i, vertexLast[1] + this.coord.j, vertexLast[2] + this.coord.k); - scratch2.ijkNormalize(); - final Vec2d orig2d0 = scratch2.ijkToHex2d(); - scratch2.reset(vertexV[0] + this.coord.i, vertexV[1] + this.coord.j, vertexV[2] + this.coord.k); - scratch2.ijkNormalize(); - final Vec2d orig2d1 = scratch2.ijkToHex2d(); + final Vec2d orig2d0 = orig(scratch, VERTEX_CLASSIII[lastV]); + final Vec2d orig2d1 = orig(scratch, VERTEX_CLASSIII[v]); // find the appropriate icosa face edge vertexes final int face2 = ((lastFace == this.face) ? fijk.face : lastFace); - final Vec2d edge0; - final Vec2d edge1; - switch (adjacentFaceDir[this.face][face2]) { - case IJ -> { - edge0 = maxDimByCIIVec2d[adjRes][0]; - edge1 = maxDimByCIIVec2d[adjRes][1]; - } - case JK -> { - edge0 = maxDimByCIIVec2d[adjRes][1]; - edge1 = maxDimByCIIVec2d[adjRes][2]; - } - // case KI: - default -> { - assert (adjacentFaceDir[this.face][face2] == KI); - edge0 = maxDimByCIIVec2d[adjRes][2]; - edge1 = maxDimByCIIVec2d[adjRes][0]; - } - } // find the intersection and add the lat/lng point to the result - final Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); - /* - If a point of intersection occurs at a hexagon vertex, then each - adjacent hexagon edge will lie completely on a single icosahedron - face, and no additional vertex is required. - */ - final boolean isIntersectionAtVertex = orig2d0.numericallyIdentical(inter) || orig2d1.numericallyIdentical(inter); - if (isIntersectionAtVertex == false) { + final Vec2d inter = findIntersectionPoint(orig2d0, orig2d1, adjRes, adjacentFaceDir[this.face][face2]); + if (inter != null) { points[numPoints++] = inter.hex2dToGeo(this.face, adjRes, true); } } @@ -623,7 +625,7 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final // convert vertex to lat/lng and add to the result // vert == start + NUM_HEX_VERTS is only used to test for possible // intersection on last edge - if (vert < start + Constants.NUM_HEX_VERTS) { + if (vert < Constants.NUM_HEX_VERTS) { points[numPoints++] = fijk.coord.ijkToGeo(fijk.face, adjRes, true); } lastFace = fijk.face; @@ -632,6 +634,42 @@ public CellBoundary faceIjkToCellBoundary(final int res, final int start, final return new CellBoundary(points, numPoints); } + private Vec2d orig(CoordIJK scratch, int[] vertexLast) { + scratch.reset(vertexLast[0] + this.coord.i, vertexLast[1] + this.coord.j, vertexLast[2] + this.coord.k); + scratch.ijkNormalize(); + return scratch.ijkToHex2d(); + } + + private Vec2d findIntersectionPoint(Vec2d orig2d0, Vec2d orig2d1, int adjRes, int faceDir) { + // find the appropriate icosa face edge vertexes + final Vec2d edge0; + final Vec2d edge1; + switch (faceDir) { + case IJ -> { + edge0 = maxDimByCIIVec2d[adjRes][0]; + edge1 = maxDimByCIIVec2d[adjRes][1]; + } + case JK -> { + edge0 = maxDimByCIIVec2d[adjRes][1]; + edge1 = maxDimByCIIVec2d[adjRes][2]; + } + // case KI: + default -> { + assert (faceDir == KI); + edge0 = maxDimByCIIVec2d[adjRes][2]; + edge1 = maxDimByCIIVec2d[adjRes][0]; + } + } + // find the intersection and add the lat/lng point to the result + final Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); + /* + If a point of intersection occurs at a hexagon vertex, then each + adjacent hexagon edge will lie completely on a single icosahedron + face, and no additional vertex is required. 
+ */ + return orig2d0.numericallyIdentical(inter) || orig2d1.numericallyIdentical(inter) ? null : inter; + } + /** * compute the corresponding H3Index. * @param res The cell resolution. @@ -651,7 +689,6 @@ static long faceIjkToH3(int res, int face, CoordIJK coord) { // out of range input throw new IllegalArgumentException(" out of range input"); } - return H3Index.H3_set_base_cell(h, BaseCells.getBaseCell(face, coord)); } diff --git a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java index 8c0bba62cecdb..08031088728ba 100644 --- a/libs/h3/src/main/java/org/elasticsearch/h3/H3.java +++ b/libs/h3/src/main/java/org/elasticsearch/h3/H3.java @@ -174,11 +174,11 @@ public static LatLng h3ToLatLng(String h3Address) { * Find the cell {@link CellBoundary} coordinates for the cell */ public static CellBoundary h3ToGeoBoundary(long h3) { - FaceIJK fijk = H3Index.h3ToFaceIjk(h3); + final FaceIJK fijk = H3Index.h3ToFaceIjk(h3); if (H3Index.H3_is_pentagon(h3)) { - return fijk.faceIjkPentToCellBoundary(H3Index.H3_get_resolution(h3), 0, Constants.NUM_PENT_VERTS); + return fijk.faceIjkPentToCellBoundary(H3Index.H3_get_resolution(h3)); } else { - return fijk.faceIjkToCellBoundary(H3Index.H3_get_resolution(h3), 0, Constants.NUM_HEX_VERTS); + return fijk.faceIjkToCellBoundary(H3Index.H3_get_resolution(h3)); } } From c7c725b2b37bdca3789207ac21a7ac2f15ce0c36 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 3 Dec 2024 01:05:56 +1100 Subject: [PATCH 343/386] Mute org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT #111319 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8d64e1557ca19..d01b956db9199 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -237,6 +237,8 @@ tests: - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} issue: https://github.com/elastic/elasticsearch/issues/117815 +- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT + issue: https://github.com/elastic/elasticsearch/issues/111319 # Examples: # From 285a71b89b5bf9a70729261e254ddd432f9a46f2 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 2 Dec 2024 09:16:28 -0500 Subject: [PATCH 344/386] [ML] Abstract upgrade mode into core logic (#117512) Transform is adding an identical upgrade mode for 9.x migration. The logic to set the metadata is roughly the same, but the follow-up actions once the upgrade mode is changed will be different. 
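Concretely, a second feature adopts this by extending the new base class and
filling in four hooks: the feature name, how to read the flag from cluster
state, how to produce the updated cluster state, and what to run once the flag
has flipped. The sketch below is a rough, hypothetical illustration of such a
subclass for Transform; TransformMetadata, its accessor and builder methods,
and the action name are assumed placeholders, not part of this change.

    // Hypothetical sketch only: TransformMetadata and the action name below are
    // assumed placeholder names illustrating the template hooks.
    public class TransportSetTransformUpgradeModeAction extends AbstractTransportSetUpgradeModeAction {

        public TransportSetTransformUpgradeModeAction(
            TransportService transportService,
            ClusterService clusterService,
            ThreadPool threadPool,
            ActionFilters actionFilters,
            IndexNameExpressionResolver indexNameExpressionResolver
        ) {
            // "transform" names the task queue, mirroring the "ml" prefix used below.
            super(
                "cluster:admin/transform/set_upgrade_mode", // assumed action name
                "transform",
                transportService,
                clusterService,
                threadPool,
                actionFilters,
                indexNameExpressionResolver
            );
        }

        @Override
        protected String featureName() {
            return "transform-set-upgrade-mode";
        }

        @Override
        protected boolean upgradeMode(ClusterState state) {
            // Read the flag out of the feature's custom cluster-state metadata.
            return TransformMetadata.getTransformMetadata(state).upgradeMode(); // assumed accessor
        }

        @Override
        protected ClusterState createUpdatedState(SetUpgradeModeActionRequest request, ClusterState state) {
            // Copy the custom metadata with the flag flipped; must return quickly.
            TransformMetadata updated = TransformMetadata.builder( // assumed builder
                state.metadata().custom(TransformMetadata.TYPE)
            ).upgradeMode(request.enabled()).build();
            return ClusterState.builder(state)
                .metadata(Metadata.builder(state.getMetadata()).putCustom(TransformMetadata.TYPE, updated))
                .build();
        }

        @Override
        protected void upgradeModeSuccessfullyChanged(
            Task task,
            SetUpgradeModeActionRequest request,
            ClusterState state,
            ActionListener<AcknowledgedResponse> listener
        ) {
            // Feature-specific follow-up goes here (for ML: isolating datafeeds or
            // waiting out AWAITING_UPGRADE tasks); the sketch just acknowledges.
            listener.onResponse(AcknowledgedResponse.TRUE);
        }
    }

The base class keeps the single-request throttling and the master-service task
queue submission, so subclasses never reimplement that plumbing.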
--- ...AbstractTransportSetUpgradeModeAction.java | 186 +++++++++++++++ .../action/SetUpgradeModeActionRequest.java | 79 +++++++ .../core/ml/action/SetUpgradeModeAction.java | 49 +--- ...actTransportSetUpgradeModeActionTests.java | 219 ++++++++++++++++++ .../action/TransportSetUpgradeModeAction.java | 179 ++++---------- 5 files changed, 535 insertions(+), 177 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetUpgradeModeActionRequest.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeActionTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeAction.java new file mode 100644 index 0000000000000..bbd90448cf855 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeAction.java @@ -0,0 +1,186 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.SimpleBatchedExecutor; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterServiceTaskQueue; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.concurrent.atomic.AtomicBoolean; + +public abstract class AbstractTransportSetUpgradeModeAction extends AcknowledgedTransportMasterNodeAction { + + private static final Logger logger = LogManager.getLogger(AbstractTransportSetUpgradeModeAction.class); + private final AtomicBoolean isRunning = new AtomicBoolean(false); + private final MasterServiceTaskQueue taskQueue; + + public AbstractTransportSetUpgradeModeAction( + String actionName, + String taskQueuePrefix, + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + actionName, + transportService, + 
clusterService, + threadPool, + actionFilters, + SetUpgradeModeActionRequest::new, + indexNameExpressionResolver, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + + this.taskQueue = clusterService.createTaskQueue(taskQueuePrefix + " upgrade mode", Priority.NORMAL, new UpdateModeExecutor()); + } + + @Override + protected void masterOperation( + Task task, + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener listener + ) throws Exception { + // Don't want folks spamming this endpoint while it is in progress, only allow one request to be handled at a time + if (isRunning.compareAndSet(false, true) == false) { + String msg = Strings.format( + "Attempted to set [upgrade_mode] for feature name [%s] to [%s] from [%s] while previous request was processing.", + featureName(), + request.enabled(), + upgradeMode(state) + ); + logger.info(msg); + Exception detail = new IllegalStateException(msg); + listener.onFailure( + new ElasticsearchStatusException( + "Cannot change [upgrade_mode] for feature name [{}]. Previous request is still being processed.", + RestStatus.TOO_MANY_REQUESTS, + detail, + featureName() + ) + ); + return; + } + + // Noop, nothing for us to do, simply return fast to the caller + var upgradeMode = upgradeMode(state); + if (request.enabled() == upgradeMode) { + logger.info("Upgrade mode noop"); + isRunning.set(false); + listener.onResponse(AcknowledgedResponse.TRUE); + return; + } + + logger.info( + "Starting to set [upgrade_mode] for feature name [{}] to [{}] from [{}]", + featureName(), + request.enabled(), + upgradeMode + ); + + ActionListener wrappedListener = ActionListener.wrap(r -> { + logger.info("Finished setting [upgrade_mode] for feature name [{}]", featureName()); + isRunning.set(false); + listener.onResponse(r); + }, e -> { + logger.info("Failed to set [upgrade_mode] for feature name [{}]", featureName()); + isRunning.set(false); + listener.onFailure(e); + }); + + ActionListener setUpgradeModeListener = wrappedListener.delegateFailure((delegate, ack) -> { + if (ack.isAcknowledged()) { + upgradeModeSuccessfullyChanged(task, request, state, delegate); + } else { + logger.info("Cluster state update is NOT acknowledged"); + wrappedListener.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); + } + }); + + taskQueue.submitTask(featureName(), new UpdateModeStateListener(request, setUpgradeModeListener), request.ackTimeout()); + } + + /** + * Define the feature name, used in log messages and naming the task on the task queue. + */ + protected abstract String featureName(); + + /** + * Parse the ClusterState for the implementation's {@link org.elasticsearch.cluster.metadata.Metadata.Custom} and find the upgradeMode + * boolean stored there. We will compare this boolean with the request's desired state to determine if we should change the metadata. + */ + protected abstract boolean upgradeMode(ClusterState state); + + /** + * This is called from the ClusterState updater and is expected to return quickly. + */ + protected abstract ClusterState createUpdatedState(SetUpgradeModeActionRequest request, ClusterState state); + + /** + * This method is only called when the cluster state was successfully changed. + * If we failed to update for any reason, this will not be called. + * The ClusterState param is the previous ClusterState before we called update. 
+ */ + protected abstract void upgradeModeSuccessfullyChanged( + Task task, + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener listener + ); + + @Override + protected ClusterBlockException checkBlock(SetUpgradeModeActionRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + private record UpdateModeStateListener(SetUpgradeModeActionRequest request, ActionListener listener) + implements + ClusterStateTaskListener { + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + } + + private class UpdateModeExecutor extends SimpleBatchedExecutor { + @Override + public Tuple executeTask(UpdateModeStateListener clusterStateListener, ClusterState clusterState) { + return Tuple.tuple(createUpdatedState(clusterStateListener.request(), clusterState), null); + } + + @Override + public void taskSucceeded(UpdateModeStateListener clusterStateListener, Void unused) { + clusterStateListener.listener().onResponse(AcknowledgedResponse.TRUE); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetUpgradeModeActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetUpgradeModeActionRequest.java new file mode 100644 index 0000000000000..98e30b284c21a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetUpgradeModeActionRequest.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class SetUpgradeModeActionRequest extends AcknowledgedRequest implements ToXContentObject { + + private final boolean enabled; + + private static final ParseField ENABLED = new ParseField("enabled"); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "set_upgrade_mode_action_request", + a -> new SetUpgradeModeActionRequest((Boolean) a[0]) + ); + + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED); + } + + public SetUpgradeModeActionRequest(boolean enabled) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + this.enabled = enabled; + } + + public SetUpgradeModeActionRequest(StreamInput in) throws IOException { + super(in); + this.enabled = in.readBoolean(); + } + + public boolean enabled() { + return enabled; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + SetUpgradeModeActionRequest other = (SetUpgradeModeActionRequest) obj; + return enabled == 
other.enabled(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ENABLED.getPreferredName(), enabled); + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java index 821caf001f3e0..a67ae33e85801 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java @@ -7,17 +7,13 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; import java.io.IOException; -import java.util.Objects; public class SetUpgradeModeAction extends ActionType { @@ -28,9 +24,7 @@ private SetUpgradeModeAction() { super(NAME); } - public static class Request extends AcknowledgedRequest implements ToXContentObject { - - private final boolean enabled; + public static class Request extends SetUpgradeModeActionRequest { private static final ParseField ENABLED = new ParseField("enabled"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -43,48 +37,11 @@ public static class Request extends AcknowledgedRequest implements ToXC } public Request(boolean enabled) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); - this.enabled = enabled; + super(enabled); } public Request(StreamInput in) throws IOException { super(in); - this.enabled = in.readBoolean(); - } - - public boolean isEnabled() { - return enabled; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(enabled); - } - - @Override - public int hashCode() { - return Objects.hash(enabled); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || obj.getClass() != getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(enabled, other.enabled); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED.getPreferredName(), enabled); - builder.endObject(); - return builder; } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeActionTests.java new file mode 100644 index 0000000000000..d780b7fbc32f4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/AbstractTransportSetUpgradeModeActionTests.java @@ -0,0 +1,219 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.SimpleBatchedExecutor; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterServiceTaskQueue; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AbstractTransportSetUpgradeModeActionTests extends ESTestCase { + /** + * Creates a TaskQueue that invokes the SimpleBatchedExecutor. + */ + public static ClusterService clusterService() { + AtomicReference> executor = new AtomicReference<>(); + MasterServiceTaskQueue taskQueue = mock(); + ClusterService clusterService = mock(); + doAnswer(ans -> { + executor.set(ans.getArgument(2)); + return taskQueue; + }).when(clusterService).createTaskQueue(any(), any(), any()); + doAnswer(ans -> { + if (executor.get() == null) { + fail("We should create the task queue before we submit tasks to it"); + } else { + executor.get().executeTask(ans.getArgument(1), ClusterState.EMPTY_STATE); + executor.get().taskSucceeded(ans.getArgument(1), null); + } + return null; + }).when(taskQueue).submitTask(any(), any(), any()); + return clusterService; + } + + /** + * Creates a TaskQueue that calls the listener with an error. + */ + public static ClusterService clusterServiceWithError(Exception e) { + MasterServiceTaskQueue taskQueue = mock(); + ClusterService clusterService = mock(); + when(clusterService.createTaskQueue(any(), any(), any())).thenReturn(taskQueue); + doAnswer(ans -> { + ClusterStateTaskListener listener = ans.getArgument(1); + listener.onFailure(e); + return null; + }).when(taskQueue).submitTask(any(), any(), any()); + return clusterService; + } + + /** + * TaskQueue that does nothing. 
+ */ + public static ClusterService clusterServiceThatDoesNothing() { + ClusterService clusterService = mock(); + when(clusterService.createTaskQueue(any(), any(), any())).thenReturn(mock()); + return clusterService; + } + + public void testIdempotent() throws Exception { + // create with update mode set to false + var action = new TestTransportSetUpgradeModeAction(clusterServiceThatDoesNothing(), false); + + // flip to true but do nothing (cluster service mock won't invoke the listener) + action.runWithoutWaiting(true); + // call again + var response = action.run(true); + + assertThat(response.v1(), nullValue()); + assertThat(response.v2(), notNullValue()); + assertThat(response.v2(), instanceOf(ElasticsearchStatusException.class)); + assertThat( + response.v2().getMessage(), + is("Cannot change [upgrade_mode] for feature name [" + action.featureName() + "]. Previous request is still being processed.") + ); + } + + public void testUpdateDoesNotRun() throws Exception { + var shouldNotChange = new AtomicBoolean(true); + var action = new TestTransportSetUpgradeModeAction(true, l -> shouldNotChange.set(false)); + + var response = action.run(true); + + assertThat(response.v1(), is(AcknowledgedResponse.TRUE)); + assertThat(response.v2(), nullValue()); + assertThat(shouldNotChange.get(), is(true)); + } + + public void testErrorReleasesLock() throws Exception { + var action = new TestTransportSetUpgradeModeAction(false, l -> l.onFailure(new IllegalStateException("hello there"))); + + action.run(true); + var response = action.run(true); + assertThat( + "Previous request should have finished processing.", + response.v2().getMessage(), + not(containsString("Previous request is still being processed")) + ); + } + + public void testErrorFromAction() throws Exception { + var expectedException = new IllegalStateException("hello there"); + var action = new TestTransportSetUpgradeModeAction(false, l -> l.onFailure(expectedException)); + + var response = action.run(true); + + assertThat(response.v1(), nullValue()); + assertThat(response.v2(), is(expectedException)); + } + + public void testErrorFromTaskQueue() throws Exception { + var expectedException = new IllegalStateException("hello there"); + var action = new TestTransportSetUpgradeModeAction(clusterServiceWithError(expectedException), false); + + var response = action.run(true); + + assertThat(response.v1(), nullValue()); + assertThat(response.v2(), is(expectedException)); + } + + public void testSuccess() throws Exception { + var action = new TestTransportSetUpgradeModeAction(false, l -> l.onResponse(AcknowledgedResponse.TRUE)); + + var response = action.run(true); + + assertThat(response.v1(), is(AcknowledgedResponse.TRUE)); + assertThat(response.v2(), nullValue()); + } + + private static class TestTransportSetUpgradeModeAction extends AbstractTransportSetUpgradeModeAction { + private final boolean upgradeMode; + private final ClusterState updatedClusterState; + private final Consumer> successFunc; + + TestTransportSetUpgradeModeAction(boolean upgradeMode, Consumer> successFunc) { + super("actionName", "taskQueuePrefix", mock(), clusterService(), mock(), mock(), mock()); + this.upgradeMode = upgradeMode; + this.updatedClusterState = ClusterState.EMPTY_STATE; + this.successFunc = successFunc; + } + + TestTransportSetUpgradeModeAction(ClusterService clusterService, boolean upgradeMode) { + super("actionName", "taskQueuePrefix", mock(), clusterService, mock(), mock(), mock()); + this.upgradeMode = upgradeMode; + this.updatedClusterState = 
ClusterState.EMPTY_STATE; + this.successFunc = listener -> {}; + } + + public void runWithoutWaiting(boolean upgrade) throws Exception { + masterOperation(mock(), new SetUpgradeModeActionRequest(upgrade), ClusterState.EMPTY_STATE, ActionListener.noop()); + } + + public Tuple run(boolean upgrade) throws Exception { + AtomicReference> response = new AtomicReference<>(); + CountDownLatch latch = new CountDownLatch(1); + masterOperation(mock(), new SetUpgradeModeActionRequest(upgrade), ClusterState.EMPTY_STATE, ActionListener.wrap(r -> { + response.set(Tuple.tuple(r, null)); + latch.countDown(); + }, e -> { + response.set(Tuple.tuple(null, e)); + latch.countDown(); + })); + assertTrue("Failed to run TestTransportSetUpgradeModeAction in 10s", latch.await(10, TimeUnit.SECONDS)); + return response.get(); + } + + @Override + protected String featureName() { + return "test-feature-name"; + } + + @Override + protected boolean upgradeMode(ClusterState state) { + return upgradeMode; + } + + @Override + protected ClusterState createUpdatedState(SetUpgradeModeActionRequest request, ClusterState state) { + return updatedClusterState; + } + + @Override + protected void upgradeModeSuccessfullyChanged( + Task task, + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener listener + ) { + successFunc.accept(listener); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java index 744d5dbd6974f..5912619e892ed 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java @@ -9,35 +9,27 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Predicates; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.persistent.PersistentTasksClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.AbstractTransportSetUpgradeModeAction; +import org.elasticsearch.xpack.core.action.SetUpgradeModeActionRequest; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; @@ -48,7 +40,6 @@ import java.util.Comparator; import java.util.List; import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static org.elasticsearch.ExceptionsHelper.rethrowAndSuppress; @@ -58,12 +49,11 @@ import static org.elasticsearch.xpack.core.ml.MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME; import static org.elasticsearch.xpack.core.ml.MlTasks.JOB_TASK_NAME; -public class TransportSetUpgradeModeAction extends AcknowledgedTransportMasterNodeAction { +public class TransportSetUpgradeModeAction extends AbstractTransportSetUpgradeModeAction { private static final Set ML_TASK_NAMES = Set.of(JOB_TASK_NAME, DATAFEED_TASK_NAME, DATA_FRAME_ANALYTICS_TASK_NAME); private static final Logger logger = LogManager.getLogger(TransportSetUpgradeModeAction.class); - private final AtomicBoolean isRunning = new AtomicBoolean(false); private final PersistentTasksClusterService persistentTasksClusterService; private final PersistentTasksService persistentTasksService; private final OriginSettingClient client; @@ -79,69 +69,38 @@ public TransportSetUpgradeModeAction( Client client, PersistentTasksService persistentTasksService ) { - super( - SetUpgradeModeAction.NAME, - transportService, - clusterService, - threadPool, - actionFilters, - SetUpgradeModeAction.Request::new, - indexNameExpressionResolver, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(SetUpgradeModeAction.NAME, "ml", transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver); this.persistentTasksClusterService = persistentTasksClusterService; this.client = new OriginSettingClient(client, ML_ORIGIN); this.persistentTasksService = persistentTasksService; } @Override - protected void masterOperation( - Task task, - SetUpgradeModeAction.Request request, - ClusterState state, - ActionListener listener - ) throws Exception { - - // Don't want folks spamming this endpoint while it is in progress, only allow one request to be handled at a time - if (isRunning.compareAndSet(false, true) == false) { - String msg = "Attempted to set [upgrade_mode] to [" - + request.isEnabled() - + "] from [" - + MlMetadata.getMlMetadata(state).isUpgradeMode() - + "] while previous request was processing."; - logger.info(msg); - Exception detail = new IllegalStateException(msg); - listener.onFailure( - new ElasticsearchStatusException( - "Cannot change [upgrade_mode]. 
Previous request is still being processed.", - RestStatus.TOO_MANY_REQUESTS, - detail - ) - ); - return; - } + protected String featureName() { + return "ml-set-upgrade-mode"; + } - // Noop, nothing for us to do, simply return fast to the caller - if (request.isEnabled() == MlMetadata.getMlMetadata(state).isUpgradeMode()) { - logger.info("Upgrade mode noop"); - isRunning.set(false); - listener.onResponse(AcknowledgedResponse.TRUE); - return; - } + @Override + protected boolean upgradeMode(ClusterState state) { + return MlMetadata.getMlMetadata(state).isUpgradeMode(); + } - logger.info( - "Starting to set [upgrade_mode] to [" + request.isEnabled() + "] from [" + MlMetadata.getMlMetadata(state).isUpgradeMode() + "]" - ); + @Override + protected ClusterState createUpdatedState(SetUpgradeModeActionRequest request, ClusterState currentState) { + logger.trace("Executing cluster state update"); + MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metadata().custom(MlMetadata.TYPE)); + builder.isUpgradeMode(request.enabled()); + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metadata(Metadata.builder(currentState.getMetadata()).putCustom(MlMetadata.TYPE, builder.build()).build()); + return newState.build(); + } - ActionListener wrappedListener = ActionListener.wrap(r -> { - logger.info("Completed upgrade mode request"); - isRunning.set(false); - listener.onResponse(r); - }, e -> { - logger.info("Completed upgrade mode request but with failure", e); - isRunning.set(false); - listener.onFailure(e); - }); + protected void upgradeModeSuccessfullyChanged( + Task task, + SetUpgradeModeActionRequest request, + ClusterState state, + ActionListener wrappedListener + ) { final PersistentTasksCustomMetadata tasksCustomMetadata = state.metadata().custom(PersistentTasksCustomMetadata.TYPE); // <4> We have unassigned the tasks, respond to the listener. @@ -201,71 +160,29 @@ protected void masterOperation( */ - ActionListener clusterStateUpdateListener = ActionListener.wrap(acknowledgedResponse -> { - // State change was not acknowledged, we either timed out or ran into some exception - // We should not continue and alert failure to the end user - if (acknowledgedResponse.isAcknowledged() == false) { - logger.info("Cluster state update is NOT acknowledged"); - wrappedListener.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); - return; - } - - // There are no tasks to worry about starting/stopping - if (tasksCustomMetadata == null || tasksCustomMetadata.tasks().isEmpty()) { - logger.info("No tasks to worry about after state update"); - wrappedListener.onResponse(AcknowledgedResponse.TRUE); - return; - } - - // Did we change from disabled -> enabled? 
- if (request.isEnabled()) { - logger.info("Enabling upgrade mode, must isolate datafeeds"); - isolateDatafeeds(tasksCustomMetadata, isolateDatafeedListener); - } else { - logger.info("Disabling upgrade mode, must wait for tasks to not have AWAITING_UPGRADE assignment"); - persistentTasksService.waitForPersistentTasksCondition( - // Wait for jobs, datafeeds and analytics not to be "Awaiting upgrade" - persistentTasksCustomMetadata -> persistentTasksCustomMetadata.tasks() - .stream() - .noneMatch(t -> ML_TASK_NAMES.contains(t.getTaskName()) && t.getAssignment().equals(AWAITING_UPGRADE)), - request.ackTimeout(), - ActionListener.wrap(r -> { - logger.info("Done waiting for tasks to be out of AWAITING_UPGRADE"); - wrappedListener.onResponse(AcknowledgedResponse.TRUE); - }, wrappedListener::onFailure) - ); - } - }, wrappedListener::onFailure); - - // <1> Change MlMetadata to indicate that upgrade_mode is now enabled - submitUnbatchedTask("ml-set-upgrade-mode", new AckedClusterStateUpdateTask(request, clusterStateUpdateListener) { - - @Override - protected AcknowledgedResponse newResponse(boolean acknowledged) { - logger.trace("Cluster update response built: " + acknowledged); - return AcknowledgedResponse.of(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - logger.trace("Executing cluster state update"); - MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metadata().custom(MlMetadata.TYPE)); - builder.isUpgradeMode(request.isEnabled()); - ClusterState.Builder newState = ClusterState.builder(currentState); - newState.metadata(Metadata.builder(currentState.getMetadata()).putCustom(MlMetadata.TYPE, builder.build()).build()); - return newState.build(); - } - }); - } - - @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here - private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { - clusterService.submitUnbatchedStateUpdateTask(source, task); - } + if (tasksCustomMetadata == null || tasksCustomMetadata.tasks().isEmpty()) { + logger.info("No tasks to worry about after state update"); + wrappedListener.onResponse(AcknowledgedResponse.TRUE); + return; + } - @Override - protected ClusterBlockException checkBlock(SetUpgradeModeAction.Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + if (request.enabled()) { + logger.info("Enabling upgrade mode, must isolate datafeeds"); + isolateDatafeeds(tasksCustomMetadata, isolateDatafeedListener); + } else { + logger.info("Disabling upgrade mode, must wait for tasks to not have AWAITING_UPGRADE assignment"); + persistentTasksService.waitForPersistentTasksCondition( + // Wait for jobs, datafeeds and analytics not to be "Awaiting upgrade" + persistentTasksCustomMetadata -> persistentTasksCustomMetadata.tasks() + .stream() + .noneMatch(t -> ML_TASK_NAMES.contains(t.getTaskName()) && t.getAssignment().equals(AWAITING_UPGRADE)), + request.ackTimeout(), + ActionListener.wrap(r -> { + logger.info("Done waiting for tasks to be out of AWAITING_UPGRADE"); + wrappedListener.onResponse(AcknowledgedResponse.TRUE); + }, wrappedListener::onFailure) + ); + } } /** From 49b707b1d6958f6593419bf936f3764bcc4a4432 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 2 Dec 2024 14:34:58 +0000 Subject: [PATCH 345/386] Remove some old metadata serialization conditions (#117825) --- .../cluster/metadata/IndexMetadata.java | 4 +- 
.../cluster/metadata/Metadata.java | 89 +++++-------------- 2 files changed, 22 insertions(+), 71 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index b7c1ee5fbad96..681ea84513088 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -1785,7 +1785,7 @@ public static IndexMetadata readFrom(StreamInput in, @Nullable Function diff(Metadata previousState) { } public static Diff readDiffFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(MetadataDiff.NOOP_METADATA_DIFF_VERSION) && in.readBoolean()) { - return SimpleDiffable.empty(); - } - return new MetadataDiff(in); + return in.readBoolean() ? SimpleDiffable.empty() : new MetadataDiff(in); } public static Metadata fromXContent(XContentParser parser) throws IOException { @@ -1552,10 +1547,6 @@ public Map getMappingsByHash() { private static class MetadataDiff implements Diff { - private static final TransportVersion NOOP_METADATA_DIFF_VERSION = TransportVersions.V_8_5_0; - private static final TransportVersion NOOP_METADATA_DIFF_SAFE_VERSION = - PublicationTransportHandler.INCLUDES_LAST_COMMITTED_DATA_VERSION; - private final long version; private final String clusterUUID; private final boolean clusterUUIDCommitted; @@ -1620,36 +1611,19 @@ private MetadataDiff(StreamInput in) throws IOException { coordinationMetadata = new CoordinationMetadata(in); transientSettings = Settings.readSettingsFromStream(in); persistentSettings = Settings.readSettingsFromStream(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - hashesOfConsistentSettings = DiffableStringMap.readDiffFrom(in); - } else { - hashesOfConsistentSettings = DiffableStringMap.DiffableStringMapDiff.EMPTY; - } + hashesOfConsistentSettings = DiffableStringMap.readDiffFrom(in); indices = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), INDEX_METADATA_DIFF_VALUE_READER); templates = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), TEMPLATES_DIFF_VALUE_READER); customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { - reservedStateMetadata = DiffableUtils.readJdkMapDiff( - in, - DiffableUtils.getStringKeySerializer(), - RESERVED_DIFF_VALUE_READER - ); - } else { - reservedStateMetadata = DiffableUtils.emptyDiff(); - } + reservedStateMetadata = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), RESERVED_DIFF_VALUE_READER); } @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(NOOP_METADATA_DIFF_SAFE_VERSION)) { - out.writeBoolean(empty); - if (empty) { - // noop diff - return; - } - } else if (out.getTransportVersion().onOrAfter(NOOP_METADATA_DIFF_VERSION)) { - // noops are not safe with these versions, see #92259 - out.writeBoolean(false); + out.writeBoolean(empty); + if (empty) { + // noop diff + return; } out.writeString(clusterUUID); out.writeBoolean(clusterUUIDCommitted); @@ -1657,15 +1631,11 @@ public void writeTo(StreamOutput out) throws IOException { coordinationMetadata.writeTo(out); transientSettings.writeTo(out); persistentSettings.writeTo(out); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - hashesOfConsistentSettings.writeTo(out); - } + hashesOfConsistentSettings.writeTo(out); indices.writeTo(out); templates.writeTo(out); customs.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { - reservedStateMetadata.writeTo(out); - } + reservedStateMetadata.writeTo(out); } @Override @@ -1696,8 +1666,6 @@ public Metadata apply(Metadata part) { } } - public static final TransportVersion MAPPINGS_AS_HASH_VERSION = TransportVersions.V_8_1_0; - public static Metadata readFrom(StreamInput in) throws IOException { Builder builder = new Builder(); builder.version = in.readLong(); @@ -1706,17 +1674,11 @@ public static Metadata readFrom(StreamInput in) throws IOException { builder.coordinationMetadata(new CoordinationMetadata(in)); builder.transientSettings(readSettingsFromStream(in)); builder.persistentSettings(readSettingsFromStream(in)); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - builder.hashesOfConsistentSettings(DiffableStringMap.readFrom(in)); - } + builder.hashesOfConsistentSettings(DiffableStringMap.readFrom(in)); final Function mappingLookup; - if (in.getTransportVersion().onOrAfter(MAPPINGS_AS_HASH_VERSION)) { - final Map mappingMetadataMap = in.readMapValues(MappingMetadata::new, MappingMetadata::getSha256); - if (mappingMetadataMap.size() > 0) { - mappingLookup = mappingMetadataMap::get; - } else { - mappingLookup = null; - } + final Map mappingMetadataMap = in.readMapValues(MappingMetadata::new, MappingMetadata::getSha256); + if (mappingMetadataMap.isEmpty() == false) { + mappingLookup = mappingMetadataMap::get; } else { mappingLookup = null; } @@ -1733,11 +1695,9 @@ public static Metadata readFrom(StreamInput in) throws IOException { Custom customIndexMetadata = in.readNamedWriteable(Custom.class); builder.putCustom(customIndexMetadata.getWriteableName(), customIndexMetadata); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { - int reservedStateSize = in.readVInt(); - for (int i = 0; i < reservedStateSize; i++) { - builder.put(ReservedStateMetadata.readFrom(in)); - } + int reservedStateSize = in.readVInt(); + for (int i = 0; i < reservedStateSize; i++) { + builder.put(ReservedStateMetadata.readFrom(in)); } return builder.build(); } @@ -1750,24 +1710,15 @@ public void writeTo(StreamOutput out) throws IOException { coordinationMetadata.writeTo(out); transientSettings.writeTo(out); persistentSettings.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_3_0)) { - hashesOfConsistentSettings.writeTo(out); - } - // Starting in #MAPPINGS_AS_HASH_VERSION we write the mapping metadata first and then write the indices without metadata so that - // we avoid writing duplicate mappings twice - if (out.getTransportVersion().onOrAfter(MAPPINGS_AS_HASH_VERSION)) { - out.writeMapValues(mappingsByHash); - } + hashesOfConsistentSettings.writeTo(out); + out.writeMapValues(mappingsByHash); out.writeVInt(indices.size()); - final boolean writeMappingsHash = out.getTransportVersion().onOrAfter(MAPPINGS_AS_HASH_VERSION); for (IndexMetadata indexMetadata : this) { - indexMetadata.writeTo(out, writeMappingsHash); + indexMetadata.writeTo(out, true); } out.writeCollection(templates.values()); VersionedNamedWriteable.writeVersionedWritables(out, customs); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { - out.writeCollection(reservedStateMetadata.values()); - } + 
out.writeCollection(reservedStateMetadata.values()); } public static Builder builder() { From ddc8b959ee7b028ffa8b3d5c9d90b5271d72a3cd Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:41:02 +0100 Subject: [PATCH 346/386] ES|QL categorize docs (#117827) * Move ES|QL categorize out of snapshot functions * Categorize docs * Add experimental + fix docs * Add experimental + fix docs --- .../functions/description/categorize.asciidoc | 2 +- .../esql/functions/examples/categorize.asciidoc | 14 ++++++++++++++ .../esql/functions/grouping-functions.asciidoc | 2 ++ .../functions/kibana/definition/categorize.json | 9 ++++++--- .../esql/functions/kibana/docs/categorize.md | 6 +++++- .../esql/functions/layout/categorize.asciidoc | 3 +++ .../src/main/resources/docs.csv-spec | 17 +++++++++++++++++ .../function/EsqlFunctionRegistry.java | 5 +++-- .../function/grouping/Categorize.java | 14 +++++++++++++- 9 files changed, 64 insertions(+), 8 deletions(-) create mode 100644 docs/reference/esql/functions/examples/categorize.asciidoc diff --git a/docs/reference/esql/functions/description/categorize.asciidoc b/docs/reference/esql/functions/description/categorize.asciidoc index b6574c1855505..a5e8e2d507574 100644 --- a/docs/reference/esql/functions/description/categorize.asciidoc +++ b/docs/reference/esql/functions/description/categorize.asciidoc @@ -2,4 +2,4 @@ *Description* -Categorizes text messages. +Groups text messages into categories of similarly formatted text values. diff --git a/docs/reference/esql/functions/examples/categorize.asciidoc b/docs/reference/esql/functions/examples/categorize.asciidoc new file mode 100644 index 0000000000000..4167be6910c89 --- /dev/null +++ b/docs/reference/esql/functions/examples/categorize.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +This example categorizes server logs messages into categories and aggregates their counts. +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=docsCategorize] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=docsCategorize-result] +|=== + diff --git a/docs/reference/esql/functions/grouping-functions.asciidoc b/docs/reference/esql/functions/grouping-functions.asciidoc index ed0caf5ec2a4c..839320ce23392 100644 --- a/docs/reference/esql/functions/grouping-functions.asciidoc +++ b/docs/reference/esql/functions/grouping-functions.asciidoc @@ -9,6 +9,8 @@ The <> command supports these grouping functions: // tag::group_list[] * <> +* experimental:[] <> // end::group_list[] include::layout/bucket.asciidoc[] +include::layout/categorize.asciidoc[] diff --git a/docs/reference/esql/functions/kibana/definition/categorize.json b/docs/reference/esql/functions/kibana/definition/categorize.json index ca3971a6e05a3..ed5fa15232b85 100644 --- a/docs/reference/esql/functions/kibana/definition/categorize.json +++ b/docs/reference/esql/functions/kibana/definition/categorize.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "categorize", - "description" : "Categorizes text messages.", + "description" : "Groups text messages into categories of similarly formatted text values.", "signatures" : [ { "params" : [ @@ -29,6 +29,9 @@ "returnType" : "keyword" } ], - "preview" : false, - "snapshot_only" : true + "examples" : [ + "FROM sample_data\n| STATS count=COUNT() BY category=CATEGORIZE(message)" + ], + "preview" : true, + "snapshot_only" : false } diff --git a/docs/reference/esql/functions/kibana/docs/categorize.md b/docs/reference/esql/functions/kibana/docs/categorize.md index f59151b5bee65..80c04b79084e9 100644 --- a/docs/reference/esql/functions/kibana/docs/categorize.md +++ b/docs/reference/esql/functions/kibana/docs/categorize.md @@ -3,5 +3,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### CATEGORIZE -Categorizes text messages. +Groups text messages into categories of similarly formatted text values. +``` +FROM sample_data +| STATS count=COUNT() BY category=CATEGORIZE(message) +``` diff --git a/docs/reference/esql/functions/layout/categorize.asciidoc b/docs/reference/esql/functions/layout/categorize.asciidoc index c547362b71ab0..4075949ab4d12 100644 --- a/docs/reference/esql/functions/layout/categorize.asciidoc +++ b/docs/reference/esql/functions/layout/categorize.asciidoc @@ -4,6 +4,8 @@ [[esql-categorize]] === `CATEGORIZE` +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + *Syntax* [.text-center] @@ -12,3 +14,4 @@ image::esql/functions/signature/categorize.svg[Embedded,opts=inline] include::../parameters/categorize.asciidoc[] include::../description/categorize.asciidoc[] include::../types/categorize.asciidoc[] +include::../examples/categorize.asciidoc[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index a6e1a771374ca..24baf1263d06a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -676,3 +676,20 @@ Ahmedabad | 9 | 72 Bangalore | 9 | 72 // end::bitLength-result[] ; + +docsCategorize +required_capability: categorize_v4 +// tag::docsCategorize[] +FROM sample_data +| STATS count=COUNT() BY category=CATEGORIZE(message) +// end::docsCategorize[] +| SORT category +; + +// tag::docsCategorize-result[] +count:long | category:keyword + 3 | .*?Connected.+?to.*? + 3 | .*?Connection.+?error.*? + 1 | .*?Disconnected.*? 
+// end::docsCategorize-result[] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3d26bc170b723..37b159922906c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -265,7 +265,9 @@ public Collection listFunctions(String pattern) { private static FunctionDefinition[][] functions() { return new FunctionDefinition[][] { // grouping functions - new FunctionDefinition[] { def(Bucket.class, Bucket::new, "bucket", "bin"), }, + new FunctionDefinition[] { + def(Bucket.class, Bucket::new, "bucket", "bin"), + def(Categorize.class, Categorize::new, "categorize") }, // aggregate functions // since they declare two public constructors - one with filter (for nested where) and one without // use casting to disambiguate between the two @@ -411,7 +413,6 @@ private static FunctionDefinition[][] snapshotFunctions() { // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. // This is an experimental function and can be removed without notice. def(Delay.class, Delay::new, "delay"), - def(Categorize.class, Categorize::new, "categorize"), def(Kql.class, Kql::new, "kql"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index 63b5073c2217a..ca0447ce11ec4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -44,10 +45,21 @@ public class Categorize extends GroupingFunction implements Validatable { private final Expression field; - @FunctionInfo(returnType = "keyword", description = "Categorizes text messages.") + @FunctionInfo( + returnType = "keyword", + description = "Groups text messages into categories of similarly formatted text values.", + examples = { + @Example( + file = "docs", + tag = "docsCategorize", + description = "This example categorizes server logs messages into categories and aggregates their counts. 
" + ) }, + preview = true + ) public Categorize( Source source, @Param(name = "field", type = { "text", "keyword" }, description = "Expression to categorize") Expression field + ) { super(source, List.of(field)); this.field = field; From 9d9a136f3d104382cc0c8de3fc42a97b7ddbfcdd Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Mon, 2 Dec 2024 11:32:12 -0500 Subject: [PATCH 347/386] Unmuting chunking test (#116837) Co-authored-by: Elastic Machine --- .../xpack/inference/qa/mixed/OpenAIServiceMixedIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java index d8d5eb49c3c00..b37bd1801b331 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -54,7 +54,6 @@ public static void shutdown() { openAiChatCompletionsServer.close(); } - @AwaitsFix(bugUrl = "Backport #112074 to 8.16") @SuppressWarnings("unchecked") public void testOpenAiEmbeddings() throws IOException { var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); From 187935eb77e31493ce98512396dd1f38d9ce41e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 2 Dec 2024 17:37:03 +0100 Subject: [PATCH 348/386] Fix OldRepositoryAccessIT testOldRepoAccess (#117649) This test started failing with the changes made in #115314 when we only have one shard in the index. This change adjusts test expectations. Closes #115631 --- muted-tests.yml | 3 --- .../java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d01b956db9199..73d9a29e275b3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -91,9 +91,6 @@ tests: - class: org.elasticsearch.xpack.restart.MLModelDeploymentFullClusterRestartIT method: testDeploymentSurvivesRestart {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/115528 -- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT - method: testOldRepoAccess - issue: https://github.com/elastic/elasticsearch/issues/115631 - class: org.elasticsearch.action.update.UpdateResponseTests method: testToAndFromXContent issue: https://github.com/elastic/elasticsearch/issues/115689 diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index 30ec6630b9618..ef1c8284b9c19 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -484,7 +484,8 @@ private void assertDocs( logger.info(searchResponse); assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); - assertEquals(numberOfShards, searchResponse.getSkippedShards()); + int expectedSkips = numberOfShards == 1 ? 
0 : numberOfShards; + assertEquals(expectedSkips, searchResponse.getSkippedShards()); } finally { searchResponse.decRef(); } From e10fc3c90dc18da0b6dd02a06113899e0be0c5de Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 2 Dec 2024 12:19:03 -0500 Subject: [PATCH 349/386] Speed up bit compared with floats or bytes script operations (#117199) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Instead of doing an "if" statement, which doesn't lend itself to vectorization, I switched to expand to the bits and multiply the 1s and 0s. This led to a marginal speed improvement on ARM. I expect that Panama vector could be used here to be even faster, but I didn't want to spend anymore time on this for the time being. ``` Benchmark (dims) Mode Cnt Score Error Units IpBitVectorScorerBenchmark.dotProductByteIfStatement 768 thrpt 5 2.952 ± 0.026 ops/us IpBitVectorScorerBenchmark.dotProductByteUnwrap 768 thrpt 5 4.017 ± 0.068 ops/us IpBitVectorScorerBenchmark.dotProductFloatIfStatement 768 thrpt 5 2.987 ± 0.124 ops/us IpBitVectorScorerBenchmark.dotProductFloatUnwrap 768 thrpt 5 4.726 ± 0.136 ops/us ``` Benchmark I used. https://gist.github.com/benwtrent/b0edb3975d2f03356c1a5ea84c72abc9 --- docs/changelog/117199.yaml | 5 ++ .../elasticsearch/simdvec/ESVectorUtil.java | 23 +------ .../DefaultESVectorUtilSupport.java | 65 +++++++++++++++++++ .../vectorization/ESVectorUtilSupport.java | 4 ++ .../PanamaESVectorUtilSupport.java | 10 +++ 5 files changed, 86 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/117199.yaml diff --git a/docs/changelog/117199.yaml b/docs/changelog/117199.yaml new file mode 100644 index 0000000000000..b685e98b61f6b --- /dev/null +++ b/docs/changelog/117199.yaml @@ -0,0 +1,5 @@ +pr: 117199 +summary: Speed up bit compared with floats or bytes script operations +area: Vector Search +type: enhancement +issues: [] diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java index 2f4743a47a14a..7fe475e86a2f5 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java @@ -61,17 +61,7 @@ public static int ipByteBit(byte[] q, byte[] d) { if (q.length != d.length * Byte.SIZE) { throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + Byte.SIZE + " x " + d.length); } - int result = 0; - // now combine the two vectors, summing the byte dimensions where the bit in d is `1` - for (int i = 0; i < d.length; i++) { - byte mask = d[i]; - for (int j = Byte.SIZE - 1; j >= 0; j--) { - if ((mask & (1 << j)) != 0) { - result += q[i * Byte.SIZE + Byte.SIZE - 1 - j]; - } - } - } - return result; + return IMPL.ipByteBit(q, d); } /** @@ -87,16 +77,7 @@ public static float ipFloatBit(float[] q, byte[] d) { if (q.length != d.length * Byte.SIZE) { throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + Byte.SIZE + " x " + d.length); } - float result = 0; - for (int i = 0; i < d.length; i++) { - byte mask = d[i]; - for (int j = Byte.SIZE - 1; j >= 0; j--) { - if ((mask & (1 << j)) != 0) { - result += q[i * Byte.SIZE + Byte.SIZE - 1 - j]; - } - } - } - return result; + return IMPL.ipFloatBit(q, d); } /** diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java 
b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java index 4a08096119d6a..00381c8c3fb2f 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java @@ -10,9 +10,18 @@ package org.elasticsearch.simdvec.internal.vectorization; import org.apache.lucene.util.BitUtil; +import org.apache.lucene.util.Constants; final class DefaultESVectorUtilSupport implements ESVectorUtilSupport { + private static float fma(float a, float b, float c) { + if (Constants.HAS_FAST_SCALAR_FMA) { + return Math.fma(a, b, c); + } else { + return a * b + c; + } + } + DefaultESVectorUtilSupport() {} @Override @@ -20,6 +29,62 @@ public long ipByteBinByte(byte[] q, byte[] d) { return ipByteBinByteImpl(q, d); } + @Override + public int ipByteBit(byte[] q, byte[] d) { + return ipByteBitImpl(q, d); + } + + @Override + public float ipFloatBit(float[] q, byte[] d) { + return ipFloatBitImpl(q, d); + } + + public static int ipByteBitImpl(byte[] q, byte[] d) { + assert q.length == d.length * Byte.SIZE; + int acc0 = 0; + int acc1 = 0; + int acc2 = 0; + int acc3 = 0; + // now combine the two vectors, summing the byte dimensions where the bit in d is `1` + for (int i = 0; i < d.length; i++) { + byte mask = d[i]; + // Make sure its just 1 or 0 + + acc0 += q[i * Byte.SIZE + 0] * ((mask >> 7) & 1); + acc1 += q[i * Byte.SIZE + 1] * ((mask >> 6) & 1); + acc2 += q[i * Byte.SIZE + 2] * ((mask >> 5) & 1); + acc3 += q[i * Byte.SIZE + 3] * ((mask >> 4) & 1); + + acc0 += q[i * Byte.SIZE + 4] * ((mask >> 3) & 1); + acc1 += q[i * Byte.SIZE + 5] * ((mask >> 2) & 1); + acc2 += q[i * Byte.SIZE + 6] * ((mask >> 1) & 1); + acc3 += q[i * Byte.SIZE + 7] * ((mask >> 0) & 1); + } + return acc0 + acc1 + acc2 + acc3; + } + + public static float ipFloatBitImpl(float[] q, byte[] d) { + assert q.length == d.length * Byte.SIZE; + float acc0 = 0; + float acc1 = 0; + float acc2 = 0; + float acc3 = 0; + // now combine the two vectors, summing the byte dimensions where the bit in d is `1` + for (int i = 0; i < d.length; i++) { + byte mask = d[i]; + acc0 = fma(q[i * Byte.SIZE + 0], (mask >> 7) & 1, acc0); + acc1 = fma(q[i * Byte.SIZE + 1], (mask >> 6) & 1, acc1); + acc2 = fma(q[i * Byte.SIZE + 2], (mask >> 5) & 1, acc2); + acc3 = fma(q[i * Byte.SIZE + 3], (mask >> 4) & 1, acc3); + + acc0 = fma(q[i * Byte.SIZE + 4], (mask >> 3) & 1, acc0); + acc1 = fma(q[i * Byte.SIZE + 5], (mask >> 2) & 1, acc1); + acc2 = fma(q[i * Byte.SIZE + 6], (mask >> 1) & 1, acc2); + acc3 = fma(q[i * Byte.SIZE + 7], (mask >> 0) & 1, acc3); + } + return acc0 + acc1 + acc2 + acc3; + } + public static long ipByteBinByteImpl(byte[] q, byte[] d) { long ret = 0; int size = d.length; diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java index d7611173ca693..6938bffec5f37 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java @@ -14,4 +14,8 @@ public interface ESVectorUtilSupport { short B_QUERY = 4; long ipByteBinByte(byte[] q, byte[] d); + + int ipByteBit(byte[] q, byte[] d); + + float ipFloatBit(float[] q, byte[] d); } diff --git 
a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java index 0e5827d046736..4de33643258e4 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java @@ -48,6 +48,16 @@ public long ipByteBinByte(byte[] q, byte[] d) { return DefaultESVectorUtilSupport.ipByteBinByteImpl(q, d); } + @Override + public int ipByteBit(byte[] q, byte[] d) { + return DefaultESVectorUtilSupport.ipByteBitImpl(q, d); + } + + @Override + public float ipFloatBit(float[] q, byte[] d) { + return DefaultESVectorUtilSupport.ipFloatBitImpl(q, d); + } + private static final VectorSpecies BYTE_SPECIES_128 = ByteVector.SPECIES_128; private static final VectorSpecies BYTE_SPECIES_256 = ByteVector.SPECIES_256; From 913e0fbca87d5a77951194460859979f4c890b80 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Mon, 2 Dec 2024 14:08:07 -0500 Subject: [PATCH 350/386] ESQL Date Nanos Addition and Subtraction (#116839) Resolves #109995 This adds support and tests for addition and subtraction of date nanos with periods and durations. It does not include support for date_diff, which is a separate ticket (#109999). The bulk of the PR is testing, the actual date math is all handled by library functions. --------- Co-authored-by: Elastic Machine --- .../esql/functions/kibana/definition/add.json | 72 ++++ .../esql/functions/kibana/definition/sub.json | 72 ++++ .../esql/functions/types/add.asciidoc | 4 + .../esql/functions/types/sub.asciidoc | 4 + .../xpack/esql/core/type/DataType.java | 8 + .../src/main/resources/date_nanos.csv-spec | 401 ++++++++++++++++++ .../arithmetic/AddDateNanosEvaluator.java | 142 +++++++ .../arithmetic/SubDateNanosEvaluator.java | 142 +++++++ .../xpack/esql/action/EsqlCapabilities.java | 4 + .../predicate/operator/arithmetic/Add.java | 34 +- .../DateTimeArithmeticOperation.java | 42 +- .../predicate/operator/arithmetic/Sub.java | 28 +- .../esql/type/EsqlDataTypeConverter.java | 7 +- .../xpack/esql/analysis/AnalyzerTests.java | 6 +- .../xpack/esql/analysis/VerifierTests.java | 4 +- .../expression/function/TestCaseSupplier.java | 95 ++++- .../operator/arithmetic/AddTests.java | 55 ++- .../operator/arithmetic/SubTests.java | 72 +++- .../esql/type/EsqlDataTypeConverterTests.java | 21 +- 19 files changed, 1152 insertions(+), 61 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java diff --git a/docs/reference/esql/functions/kibana/definition/add.json b/docs/reference/esql/functions/kibana/definition/add.json index bd9fbf4d4f9ec..cfb4755a93d59 100644 --- a/docs/reference/esql/functions/kibana/definition/add.json +++ b/docs/reference/esql/functions/kibana/definition/add.json @@ -40,6 +40,42 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_period", + "optional" : false, + "description" : "A numeric value or a date time value." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "time_duration", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -58,6 +94,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_period", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -256,6 +310,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "time_duration", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/sub.json b/docs/reference/esql/functions/kibana/definition/sub.json index e10e5a662c8cb..608b5eb1009a7 100644 --- a/docs/reference/esql/functions/kibana/definition/sub.json +++ b/docs/reference/esql/functions/kibana/definition/sub.json @@ -40,6 +40,60 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_period", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "time_duration", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_period", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -220,6 +274,24 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "time_duration", + "optional" : false, + "description" : "A numeric value or a date time value." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "A numeric value or a date time value." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc index 54d1aec463c1a..e47a0d81f27e7 100644 --- a/docs/reference/esql/functions/types/add.asciidoc +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -7,7 +7,10 @@ lhs | rhs | result date | date_period | date date | time_duration | date +date_nanos | date_period | date_nanos +date_nanos | time_duration | date_nanos date_period | date | date +date_period | date_nanos | date_nanos date_period | date_period | date_period double | double | double double | integer | double @@ -19,6 +22,7 @@ long | double | double long | integer | long long | long | long time_duration | date | date +time_duration | date_nanos | date_nanos time_duration | time_duration | time_duration unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index c3ded301ebe68..dca56026071ee 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -7,6 +7,9 @@ lhs | rhs | result date | date_period | date date | time_duration | date +date_nanos | date_period | date_nanos +date_nanos | time_duration | date_nanos +date_period | date_nanos | date_nanos date_period | date_period | date_period double | double | double double | integer | double @@ -17,6 +20,7 @@ integer | long | long long | double | double long | integer | long long | long | long +time_duration | date_nanos | date_nanos time_duration | time_duration | time_duration unsigned_long | unsigned_long | unsigned_long |=== diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 1c65dd386667f..a63571093ba58 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -415,6 +415,14 @@ public static boolean isDateTimeOrTemporal(DataType t) { return isDateTime(t) || isTemporalAmount(t); } + public static boolean isDateTimeOrNanosOrTemporal(DataType t) { + return isDateTime(t) || isTemporalAmount(t) || t == DATE_NANOS; + } + + public static boolean isMillisOrNanos(DataType t) { + return t == DATETIME || t == DATE_NANOS; + } + public static boolean areCompatible(DataType left, DataType right) { if (left == right) { return true; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index 2ee23382515da..daa45825b93fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -459,3 +459,404 @@ yr:date_nanos | mo:date_nanos | mn:date_nanos 2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z 2023-01-01T00:00:00.000000000Z | 2023-10-01T00:00:00.000000000Z | 2023-10-23T12:10:00.000000000Z | 2023-10-23T12:15:03.360000000Z ; + +Add date nanos +required_capability: date_nanos_add_subtract + +FROM date_nanos +| WHERE millis > "2020-01-01" +| EVAL mo = nanos + 1 month, hr = nanos + 1 hour, dy = nanos - 4 days, mn = nanos - 2 minutes +| SORT millis DESC 
+| KEEP mo, hr, dy, mn; + +mo:date_nanos | hr:date_nanos | dy:date_nanos | mn:date_nanos +2023-11-23T13:55:01.543123456Z | 2023-10-23T14:55:01.543123456Z | 2023-10-19T13:55:01.543123456Z | 2023-10-23T13:53:01.543123456Z +2023-11-23T13:53:55.832987654Z | 2023-10-23T14:53:55.832987654Z | 2023-10-19T13:53:55.832987654Z | 2023-10-23T13:51:55.832987654Z +2023-11-23T13:52:55.015787878Z | 2023-10-23T14:52:55.015787878Z | 2023-10-19T13:52:55.015787878Z | 2023-10-23T13:50:55.015787878Z +2023-11-23T13:51:54.732102837Z | 2023-10-23T14:51:54.732102837Z | 2023-10-19T13:51:54.732102837Z | 2023-10-23T13:49:54.732102837Z +2023-11-23T13:33:34.937193000Z | 2023-10-23T14:33:34.937193000Z | 2023-10-19T13:33:34.937193000Z | 2023-10-23T13:31:34.937193000Z +2023-11-23T12:27:28.948000000Z | 2023-10-23T13:27:28.948000000Z | 2023-10-19T12:27:28.948000000Z | 2023-10-23T12:25:28.948000000Z +2023-11-23T12:15:03.360103847Z | 2023-10-23T13:15:03.360103847Z | 2023-10-19T12:15:03.360103847Z | 2023-10-23T12:13:03.360103847Z +2023-11-23T12:15:03.360103847Z | 2023-10-23T13:15:03.360103847Z | 2023-10-19T12:15:03.360103847Z | 2023-10-23T12:13:03.360103847Z +; + +datePlusPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.000123456Z") +| eval plus = dt + 4 years + 3 months + 2 weeks + 1 day; + +dt:date_nanos | plus:date_nanos +2100-01-01T01:01:01.000123456Z | 2104-04-16T01:01:01.000123456Z +; + +datePlusPeriodFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = 4 years + 3 months + 2 weeks + 1 day + n | keep then; + +then:date_nanos +2057-07-19T00:00:00.000123456Z +; + +datePlusMixedPeriodsFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-01T00:00:00.000123456Z") +| eval then = 4 years + 3 months + 1 year + 2 weeks + 1 month + 1 day + 1 week + 1 day + n +| keep then; + +then:date_nanos +2058-08-24T00:00:00.000123456Z +; + +datePlusSumOfPeriodsFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = (4 years + 3 months + 2 weeks + 1 day) + n | keep then; + +then:date_nanos +2057-07-19T00:00:00.000123456Z +; + +datePlusNegatedPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2104-04-16T01:01:01.000123456Z") +| eval plus = dt + (-(4 years + 3 months + 2 weeks + 1 day)); + +dt:date_nanos | plus:date_nanos +2104-04-16T01:01:01.000123456Z | 2100-01-01T01:01:01.000123456Z +; + +dateMinusPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2104-04-16T01:01:01.000123456Z") +| eval minus = dt - 4 years - 3 months - 2 weeks - 1 day; + +dt:date_nanos | minus:date_nanos +2104-04-16T01:01:01.000123456Z | 2100-01-01T01:01:01.000123456Z +; + +dateMinusPeriodFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2057-07-19T00:00:00.000123456Z") | eval then = -4 years - 3 months - 2 weeks - 1 day + n | keep then; + +then:date_nanos +2053-04-04T00:00:00.000123456Z +; + +dateMinusSumOfNegativePeriods +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = n - (-4 years - 3 months - 2 weeks - 1 day)| keep then; + +then:date_nanos +2057-07-19T00:00:00.000123456Z +; + +dateMinusPeriodsFromLeftMultipleEvals +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") +| eval x = -4 years + n +| eval y = -3 months + x, then = y + (-2 
weeks - 1 day) +| keep then; + +then:date_nanos +2048-12-20T00:00:00.000123456Z +; + +datePlusDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T00:00:00.000123456Z") +| eval plus = dt + 1 hour + 1 minute + 1 second + 1 milliseconds; + +dt:date_nanos | plus:date_nanos +2100-01-01T00:00:00.000123456Z | 2100-01-01T01:01:01.001123456Z +; + +datePlusDurationFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = 1 hour + 1 minute + 1 second + 1 milliseconds + n | keep then; + +then:date_nanos +2053-04-04T01:01:01.001123456Z +; + +datePlusMixedDurationsFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") +| eval then = 1 hour + 1 minute + 2 hour + 1 second + 2 minute + 1 milliseconds + 2 second + 2 millisecond + n +| keep then; + +then:date_nanos +2053-04-04T03:03:03.003123456Z +; + +datePlusSumOfDurationsFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = (1 hour + 1 minute + 1 second + 1 milliseconds) + n | keep then; + +then:date_nanos +2053-04-04T01:01:01.001123456Z +; + +datePlusNegatedDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval plus = dt + (-(1 hour + 1 minute + 1 second + 1 milliseconds)); + +dt:date_nanos | plus:date_nanos +2100-01-01T01:01:01.001123456Z | 2100-01-01T00:00:00.000123456Z +; + +datePlusNull +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval plus_post = dt + null, plus_pre = null + dt; + +dt:date_nanos | plus_post:date_nanos | plus_pre:date_nanos +2100-01-01T01:01:01.001123456Z | null | null +; + +datePlusNullAndDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval plus_post = dt + null + 1 hour, plus_pre = 1 second + null + dt; + +dt:date_nanos | plus_post:date_nanos | plus_pre:date_nanos +2100-01-01T01:01:01.001123456Z | null | null +; + +datePlusNullAndPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval plus_post = dt + null + 2 years, plus_pre = 3 weeks + null + dt; + +dt:date_nanos | plus_post:date_nanos | plus_pre:date_nanos +2100-01-01T01:01:01.001123456Z | null | null +; + +datePlusQuarter +required_capability: date_nanos_add_subtract + +required_capability: timespan_abbreviations +row dt = to_date_nanos("2100-01-01T01:01:01.000123456Z") +| eval plusQuarter = dt + 2 quarters +; + +dt:date_nanos | plusQuarter:date_nanos +2100-01-01T01:01:01.000123456Z | 2100-07-01T01:01:01.000123456Z +; + +datePlusAbbreviatedDurations +required_capability: timespan_abbreviations +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T00:00:00.000123456Z") +| eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms +; + +dt:date_nanos | plusDurations:date_nanos +2100-01-01T00:00:00.000123456Z | 2100-01-01T01:02:03.004123456Z +; + +datePlusAbbreviatedPeriods +required_capability: timespan_abbreviations +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T00:00:00.000123456Z") +| eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d +; + +dt:date_nanos | plusDurations:date_nanos +2100-01-01T00:00:00.000123456Z | 2101-11-01T00:00:00.000123456Z +; + + +dateMinusDuration +required_capability: 
date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval minus = dt - 1 hour - 1 minute - 1 second - 1 milliseconds; + +dt:date_nanos | minus:date_nanos +2100-01-01T01:01:01.001123456Z | 2100-01-01T00:00:00.000123456Z +; + +dateMinusDurationFromLeft +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T01:01:01.001123456Z") | eval then = -1 hour - 1 minute - 1 second - 1 milliseconds + n | keep then; + +then:date_nanos +2053-04-04T00:00:00.000123456Z +; + +dateMinusSumOfNegativeDurations +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T00:00:00.000123456Z") | eval then = n - (-1 hour - 1 minute - 1 second - 1 milliseconds) | keep then; + +then:date_nanos +2053-04-04T01:01:01.001123456Z +; + +dateMinusDurationsFromLeftMultipleEvals +required_capability: date_nanos_add_subtract + +row n = to_date_nanos("2053-04-04T04:03:02.001123456Z") +| eval x = -4 hour + n +| eval y = -3 minute + x, then = y + (-2 second - 1 millisecond) +| keep then +; + +then:date_nanos +2053-04-04T00:00:00.000123456Z +; + +dateMinusNull +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2053-04-04T04:03:02.001123456Z") +| eval minus = dt - null +; + +dt:date_nanos | minus:date_nanos +2053-04-04T04:03:02.001123456Z | null +; + +dateMinusNullAndPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2053-04-04T04:03:02.001123456Z") +| eval minus = dt - null - 4 minutes +; + +dt:date_nanos | minus:date_nanos +2053-04-04T04:03:02.001123456Z | null +; + +dateMinusNullAndDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2053-04-04T04:03:02.001123456Z") +| eval minus = dt - 6 days - null +; + +dt:date_nanos | minus:date_nanos +2053-04-04T04:03:02.001123456Z | null +; + +datePlusPeriodAndDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T00:00:00.000123456Z") +| eval plus = dt + 4 years + 3 months + 2 weeks + 1 day + 1 hour + 1 minute + 1 second + 1 milliseconds; + +dt:date_nanos | plus:date_nanos +2100-01-01T00:00:00.000123456Z | 2104-04-16T01:01:01.001123456Z +; + +dateMinusPeriodAndDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2104-04-16T01:01:01.001123456Z") +| eval minus = dt - 4 years - 3 months - 2 weeks - 1 day - 1 hour - 1 minute - 1 second - 1 milliseconds; + +dt:date_nanos |minus:date_nanos +2104-04-16T01:01:01.001123456Z |2100-01-01T00:00:00.000123456Z +; + +datePlusPeriodMinusDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2100-01-01T01:01:01.001123456Z") +| eval plus = dt + 4 years + 3 months + 2 weeks + 1 day - 1 hour - 1 minute - 1 second - 1 milliseconds; + +dt:date_nanos | plus:date_nanos +2100-01-01T01:01:01.001123456Z | 2104-04-16T00:00:00.000123456Z +; + +datePlusDurationMinusPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos("2104-04-16T00:00:00.000123456Z") +| eval plus = dt - 4 years - 3 months - 2 weeks - 1 day + 1 hour + 1 minute + 1 second + 1 milliseconds; + +dt:date_nanos | plus:date_nanos +2104-04-16T00:00:00.000123456Z | 2100-01-01T01:01:01.001123456Z +; + +dateMathArithmeticOverflow from addition +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos(9223372036854775807) +| eval plus = dt + 1 day +| keep plus; + +warning:Line 2:15: evaluation of [dt + 1 day] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:15: java.time.DateTimeException: Date nanos out of range. Must be between 1970-01-01T00:00:00Z and 2262-04-11T23:47:16.854775807 +plus:date_nanos +null +; + +date nanos subtraction before 1970 +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos(0::long) +| eval minus = dt - 1 day +| keep minus; + +warning:Line 2:16: evaluation of [dt - 1 day] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:16: java.time.DateTimeException: Date nanos out of range. Must be between 1970-01-01T00:00:00Z and 2262-04-11T23:47:16.854775807 +minus:date_nanos +null +; + +dateMathDateException +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos(0::long) +| eval plus = dt + 2147483647 years +| keep plus; + +warning:Line 2:15: evaluation of [dt + 2147483647 years] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:15: java.time.DateTimeException: Invalid value for Year (valid values -999999999 - 999999999): 2147485617 + +plus:date_nanos +null +; + +dateMathNegatedPeriod +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos(0::long) +| eval plus = -(-1 year) + dt +| keep plus; + +plus:date_nanos +1971-01-01T00:00:00.000Z +; + +dateMathNegatedDuration +required_capability: date_nanos_add_subtract + +row dt = to_date_nanos(0::long) +| eval plus = -(-1 second) + dt +| keep plus; + +plus:date_nanos +1970-01-01T00:00:01.000Z +; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java new file mode 100644 index 0000000000000..fe80536ea5d0d --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDateNanosEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.DateTimeException; +import java.time.temporal.TemporalAmount; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. + * This class is generated. Do not edit it. 
+ */ +public final class AddDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator dateNanos; + + private final TemporalAmount temporalAmount; + + private final DriverContext driverContext; + + private Warnings warnings; + + public AddDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator dateNanos, + TemporalAmount temporalAmount, DriverContext driverContext) { + this.source = source; + this.dateNanos = dateNanos; + this.temporalAmount = temporalAmount; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock dateNanosBlock = (LongBlock) dateNanos.eval(page)) { + LongVector dateNanosVector = dateNanosBlock.asVector(); + if (dateNanosVector == null) { + return eval(page.getPositionCount(), dateNanosBlock); + } + return eval(page.getPositionCount(), dateNanosVector); + } + } + + public LongBlock eval(int positionCount, LongBlock dateNanosBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (dateNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (dateNanosBlock.getValueCount(p) != 1) { + if (dateNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(Add.processDateNanos(dateNanosBlock.getLong(dateNanosBlock.getFirstValueIndex(p)), this.temporalAmount)); + } catch (ArithmeticException | DateTimeException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public LongBlock eval(int positionCount, LongVector dateNanosVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(Add.processDateNanos(dateNanosVector.getLong(p), this.temporalAmount)); + } catch (ArithmeticException | DateTimeException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "AddDateNanosEvaluator[" + "dateNanos=" + dateNanos + ", temporalAmount=" + temporalAmount + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(dateNanos); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory dateNanos; + + private final TemporalAmount temporalAmount; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory dateNanos, + TemporalAmount temporalAmount) { + this.source = source; + this.dateNanos = dateNanos; + this.temporalAmount = temporalAmount; + } + + @Override + public AddDateNanosEvaluator get(DriverContext context) { + return new AddDateNanosEvaluator(source, dateNanos.get(context), temporalAmount, context); + } + + @Override + public String toString() { + return "AddDateNanosEvaluator[" + "dateNanos=" + dateNanos + ", temporalAmount=" + temporalAmount + "]"; + } + } +} 
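The subtraction evaluator generated next mirrors the addition evaluator above, differing only in the `processDateNanos` method it delegates to. `Add.processDateNanos` itself falls outside this excerpt, so the following standalone sketch shows the general shape of the nanosecond date math involved, inferred from the `java.time` imports (`Instant`, `ZonedDateTime`, `Period`, `Duration`) that this patch adds to `Add.java`. The helper name `addToEpochNanos`, the UTC zone choice, and the exact overflow handling are illustrative assumptions, not the actual Elasticsearch implementation.

```java
import java.time.Duration;
import java.time.Instant;
import java.time.Period;
import java.time.ZoneOffset;
import java.time.temporal.TemporalAmount;

public class DateNanosMathSketch {
    // Illustrative only: apply a TemporalAmount to an epoch-nanosecond timestamp.
    // Calendar-based amounts (Period) need a zoned date-time, while fixed-length
    // amounts (Duration) can be applied to the Instant directly;
    // Instant.plus(Period) would throw UnsupportedTemporalTypeException.
    static long addToEpochNanos(long epochNanos, TemporalAmount amount) {
        Instant instant = Instant.ofEpochSecond(
            Math.floorDiv(epochNanos, 1_000_000_000L),
            Math.floorMod(epochNanos, 1_000_000_000L)
        );
        Instant result = amount instanceof Period period
            ? instant.atZone(ZoneOffset.UTC).plus(period).toInstant()
            : instant.plus(amount);
        // multiplyExact/addExact surface out-of-range results as ArithmeticException,
        // which the evaluator's catch block turns into a warning plus a null result,
        // as exercised by the overflow cases in date_nanos.csv-spec above.
        return Math.addExact(Math.multiplyExact(result.getEpochSecond(), 1_000_000_000L), result.getNano());
    }

    public static void main(String[] args) {
        long base = 123_456L; // 1970-01-01T00:00:00.000123456Z
        System.out.println(addToEpochNanos(base, Period.ofYears(1)));   // one calendar year later
        System.out.println(addToEpochNanos(base, Duration.ofHours(1))); // exactly 3,600 seconds later
    }
}
```

The generated wrappers above and below add only the per-block and per-vector plumbing, multi-value warnings, and exception-to-null handling around this core computation.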
diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java new file mode 100644 index 0000000000000..3b6f4c1046d40 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDateNanosEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.time.DateTimeException; +import java.time.temporal.TemporalAmount; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. + * This class is generated. Do not edit it. + */ +public final class SubDateNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator dateNanos; + + private final TemporalAmount temporalAmount; + + private final DriverContext driverContext; + + private Warnings warnings; + + public SubDateNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator dateNanos, + TemporalAmount temporalAmount, DriverContext driverContext) { + this.source = source; + this.dateNanos = dateNanos; + this.temporalAmount = temporalAmount; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock dateNanosBlock = (LongBlock) dateNanos.eval(page)) { + LongVector dateNanosVector = dateNanosBlock.asVector(); + if (dateNanosVector == null) { + return eval(page.getPositionCount(), dateNanosBlock); + } + return eval(page.getPositionCount(), dateNanosVector); + } + } + + public LongBlock eval(int positionCount, LongBlock dateNanosBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (dateNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (dateNanosBlock.getValueCount(p) != 1) { + if (dateNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(Sub.processDateNanos(dateNanosBlock.getLong(dateNanosBlock.getFirstValueIndex(p)), this.temporalAmount)); + } catch (ArithmeticException | DateTimeException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public LongBlock eval(int positionCount, LongVector dateNanosVector) { + try(LongBlock.Builder result = 
driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(Sub.processDateNanos(dateNanosVector.getLong(p), this.temporalAmount)); + } catch (ArithmeticException | DateTimeException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SubDateNanosEvaluator[" + "dateNanos=" + dateNanos + ", temporalAmount=" + temporalAmount + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(dateNanos); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory dateNanos; + + private final TemporalAmount temporalAmount; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory dateNanos, + TemporalAmount temporalAmount) { + this.source = source; + this.dateNanos = dateNanos; + this.temporalAmount = temporalAmount; + } + + @Override + public SubDateNanosEvaluator get(DriverContext context) { + return new SubDateNanosEvaluator(source, dateNanos.get(context), temporalAmount, context); + } + + @Override + public String toString() { + return "SubDateNanosEvaluator[" + "dateNanos=" + dateNanos + ", temporalAmount=" + temporalAmount + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index dc3329a906741..a93590d7a5bc2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -345,6 +345,10 @@ public enum Cap { */ LEAST_GREATEST_FOR_DATENANOS(), + /** + * Support add and subtract on date nanos + */ + DATE_NANOS_ADD_SUBTRACT(), /** * Support for date_trunc function on date nanos type */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java index 8f8d885ee379b..9d34410e8a164 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -21,7 +22,9 @@ import java.io.IOException; import java.time.DateTimeException; import java.time.Duration; +import java.time.Instant; import java.time.Period; +import java.time.ZonedDateTime; import java.time.temporal.TemporalAmount; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; @@ -33,7 +36,7 @@ public class Add extends DateTimeArithmeticOperation 
implements BinaryComparison public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Add", Add::new); @FunctionInfo( - returnType = { "double", "integer", "long", "date_period", "datetime", "time_duration", "unsigned_long" }, + returnType = { "double", "integer", "long", "date_nanos", "date_period", "datetime", "time_duration", "unsigned_long" }, description = "Add two numbers together. " + "If either field is <> then the result is `null`." ) public Add( @@ -41,12 +44,12 @@ public Add( @Param( name = "lhs", description = "A numeric value or a date time value.", - type = { "double", "integer", "long", "date_period", "datetime", "time_duration", "unsigned_long" } + type = { "double", "integer", "long", "date_nanos", "date_period", "datetime", "time_duration", "unsigned_long" } ) Expression left, @Param( name = "rhs", description = "A numeric value or a date time value.", - type = { "double", "integer", "long", "date_period", "datetime", "time_duration", "unsigned_long" } + type = { "double", "integer", "long", "date_nanos", "date_period", "datetime", "time_duration", "unsigned_long" } ) Expression right ) { super( @@ -58,7 +61,8 @@ public Add( AddLongsEvaluator.Factory::new, AddUnsignedLongsEvaluator.Factory::new, AddDoublesEvaluator.Factory::new, - AddDatetimesEvaluator.Factory::new + AddDatetimesEvaluator.Factory::new, + AddDateNanosEvaluator.Factory::new ); } @@ -70,7 +74,8 @@ private Add(StreamInput in) throws IOException { AddLongsEvaluator.Factory::new, AddUnsignedLongsEvaluator.Factory::new, AddDoublesEvaluator.Factory::new, - AddDatetimesEvaluator.Factory::new + AddDatetimesEvaluator.Factory::new, + AddDateNanosEvaluator.Factory::new ); } @@ -130,6 +135,25 @@ static long processDatetimes(long datetime, @Fixed TemporalAmount temporalAmount return asMillis(asDateTime(datetime).plus(temporalAmount)); } + @Evaluator(extraName = "DateNanos", warnExceptions = { ArithmeticException.class, DateTimeException.class }) + static long processDateNanos(long dateNanos, @Fixed TemporalAmount temporalAmount) { + // Instant.plus behaves differently from ZonedDateTime.plus, but DateUtils generally works with instants. + try { + return DateUtils.toLong( + Instant.from( + ZonedDateTime.ofInstant(DateUtils.toInstant(dateNanos), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC) + .plus(temporalAmount) + ) + ); + } catch (IllegalArgumentException e) { + /* + toLong will throw IllegalArgumentException for out of range dates, but that includes the actual value which we want + to avoid returning here. + */ + throw new DateTimeException("Date nanos out of range. 
Must be between 1970-01-01T00:00:00Z and 2262-04-11T23:47:16.854775807"); + } + } + @Override public Period fold(Period left, Period right) { return left.plus(right); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index d407dd8bf7de1..8bb166fac60bb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -22,10 +22,11 @@ import java.util.Collection; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; -import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrTemporal; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrNanosOrTemporal; +import static org.elasticsearch.xpack.esql.core.type.DataType.isMillisOrNanos; import static org.elasticsearch.xpack.esql.core.type.DataType.isNull; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; @@ -35,7 +36,8 @@ interface DatetimeArithmeticEvaluator { ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory expressionEvaluator, TemporalAmount temporalAmount); } - private final DatetimeArithmeticEvaluator datetimes; + private final DatetimeArithmeticEvaluator millisEvaluator; + private final DatetimeArithmeticEvaluator nanosEvaluator; DateTimeArithmeticOperation( Source source, @@ -46,10 +48,12 @@ interface DatetimeArithmeticEvaluator { BinaryEvaluator longs, BinaryEvaluator ulongs, BinaryEvaluator doubles, - DatetimeArithmeticEvaluator datetimes + DatetimeArithmeticEvaluator millisEvaluator, + DatetimeArithmeticEvaluator nanosEvaluator ) { super(source, left, right, op, ints, longs, ulongs, doubles); - this.datetimes = datetimes; + this.millisEvaluator = millisEvaluator; + this.nanosEvaluator = nanosEvaluator; } DateTimeArithmeticOperation( @@ -59,19 +63,22 @@ interface DatetimeArithmeticEvaluator { BinaryEvaluator longs, BinaryEvaluator ulongs, BinaryEvaluator doubles, - DatetimeArithmeticEvaluator datetimes + DatetimeArithmeticEvaluator millisEvaluator, + DatetimeArithmeticEvaluator nanosEvaluator ) throws IOException { super(in, op, ints, longs, ulongs, doubles); - this.datetimes = datetimes; + this.millisEvaluator = millisEvaluator; + this.nanosEvaluator = nanosEvaluator; } @Override protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { return TypeResolutions.isType( e, - t -> t.isNumeric() || DataType.isDateTimeOrTemporal(t) || DataType.isNull(t), + t -> t.isNumeric() || DataType.isDateTimeOrNanosOrTemporal(t) || DataType.isNull(t), sourceText(), paramOrdinal, + "date_nanos", "datetime", "numeric" ); @@ -86,11 +93,11 @@ protected TypeResolution checkCompatibility() { // - one argument is a DATETIME and the other a (foldable) TemporalValue, or // - both arguments are TemporalValues (so we can fold them), or // - one argument is 
NULL and the other one a DATETIME. - if (isDateTimeOrTemporal(leftType) || isDateTimeOrTemporal(rightType)) { + if (isDateTimeOrNanosOrTemporal(leftType) || isDateTimeOrNanosOrTemporal(rightType)) { if (isNull(leftType) || isNull(rightType)) { return TypeResolution.TYPE_RESOLVED; } - if ((isDateTime(leftType) && isTemporalAmount(rightType)) || (isTemporalAmount(leftType) && isDateTime(rightType))) { + if ((isMillisOrNanos(leftType) && isTemporalAmount(rightType)) || (isTemporalAmount(leftType) && isMillisOrNanos(rightType))) { return TypeResolution.TYPE_RESOLVED; } if (isTemporalAmount(leftType) && isTemporalAmount(rightType) && leftType == rightType) { @@ -171,7 +178,20 @@ public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { temporalAmountArgument = left(); } - return datetimes.apply(source(), toEvaluator.apply(datetimeArgument), (TemporalAmount) temporalAmountArgument.fold()); + return millisEvaluator.apply(source(), toEvaluator.apply(datetimeArgument), (TemporalAmount) temporalAmountArgument.fold()); + } else if (dataType() == DATE_NANOS) { + // One of the arguments has to be a date_nanos and the other a temporal amount. + Expression dateNanosArgument; + Expression temporalAmountArgument; + if (left().dataType() == DATE_NANOS) { + dateNanosArgument = left(); + temporalAmountArgument = right(); + } else { + dateNanosArgument = right(); + temporalAmountArgument = left(); + } + + return nanosEvaluator.apply(source(), toEvaluator.apply(dateNanosArgument), (TemporalAmount) temporalAmountArgument.fold()); } else { return super.toEvaluator(toEvaluator); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java index 27f5579129cc9..e072619e67728 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -22,7 +23,9 @@ import java.io.IOException; import java.time.DateTimeException; import java.time.Duration; +import java.time.Instant; import java.time.Period; +import java.time.ZonedDateTime; import java.time.temporal.TemporalAmount; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -61,7 +64,8 @@ public Sub( SubLongsEvaluator.Factory::new, SubUnsignedLongsEvaluator.Factory::new, SubDoublesEvaluator.Factory::new, - SubDatetimesEvaluator.Factory::new + SubDatetimesEvaluator.Factory::new, + SubDateNanosEvaluator.Factory::new ); } @@ -73,7 +77,8 @@ private Sub(StreamInput in) throws IOException { SubLongsEvaluator.Factory::new, SubUnsignedLongsEvaluator.Factory::new, SubDoublesEvaluator.Factory::new, - SubDatetimesEvaluator.Factory::new + SubDatetimesEvaluator.Factory::new, + SubDateNanosEvaluator.Factory::new ); } @@ -143,6 +148,25 @@ static long processDatetimes(long datetime, @Fixed TemporalAmount temporalAmount return asMillis(asDateTime(datetime).minus(temporalAmount)); } + @Evaluator(extraName = "DateNanos", warnExceptions = { 
ArithmeticException.class, DateTimeException.class }) + static long processDateNanos(long dateNanos, @Fixed TemporalAmount temporalAmount) { + // Instant.plus behaves differently from ZonedDateTime.plus, but DateUtils generally works with instants. + try { + return DateUtils.toLong( + Instant.from( + ZonedDateTime.ofInstant(DateUtils.toInstant(dateNanos), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC) + .minus(temporalAmount) + ) + ); + } catch (IllegalArgumentException e) { + /* + toLong will throw IllegalArgumentException for out of range dates, but that includes the actual value which we want + to avoid returning here. + */ + throw new DateTimeException("Date nanos out of range. Must be between 1970-01-01T00:00:00Z and 2262-04-11T23:47:16.854775807"); + } + } + @Override public Period fold(Period left, Period right) { return left.minus(right); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 4bfc9ac5d848f..6ba2d8451f956 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -78,7 +78,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrTemporal; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrNanosOrTemporal; import static org.elasticsearch.xpack.esql.core.type.DataType.isNullOrDatePeriod; import static org.elasticsearch.xpack.esql.core.type.DataType.isNullOrTemporalAmount; import static org.elasticsearch.xpack.esql.core.type.DataType.isNullOrTimeDuration; @@ -378,10 +378,13 @@ public static DataType commonType(DataType left, DataType right) { if (right == NULL) { return left; } - if (isDateTimeOrTemporal(left) || isDateTimeOrTemporal(right)) { + if (isDateTimeOrNanosOrTemporal(left) || isDateTimeOrNanosOrTemporal(right)) { if ((isDateTime(left) && isNullOrTemporalAmount(right)) || (isNullOrTemporalAmount(left) && isDateTime(right))) { return DATETIME; } + if ((left == DATE_NANOS && isNullOrTemporalAmount(right)) || (isNullOrTemporalAmount(left) && right == DATE_NANOS)) { + return DATE_NANOS; + } if (isNullOrTimeDuration(left) && isNullOrTimeDuration(right)) { return TIME_DURATION; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index e0ebc92afa95d..5a1e109041a16 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2009,14 +2009,14 @@ public void testImplicitCasting() { assertThat( e.getMessage(), - containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + 1 day] must be [datetime or numeric]") + containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + 1 day] must be [date_nanos, datetime or numeric]") ); e = expectThrows(VerificationException.class, () -> analyze(""" from test | eval x = to_string(null) - 1 day """)); - assertThat(e.getMessage(), containsString("first argument 
of [to_string(null) - 1 day] must be [datetime or numeric]")); + assertThat(e.getMessage(), containsString("first argument of [to_string(null) - 1 day] must be [date_nanos, datetime or numeric]")); e = expectThrows(VerificationException.class, () -> analyze(""" from test | eval x = concat("2024", "-04", "-01") + "1 day" @@ -2024,7 +2024,7 @@ public void testImplicitCasting() { assertThat( e.getMessage(), - containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + \"1 day\"] must be [datetime or numeric]") + containsString("first argument of [concat(\"2024\", \"-04\", \"-01\") + \"1 day\"] must be [date_nanos, datetime or numeric]") ); e = expectThrows(VerificationException.class, () -> analyze(""" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index d4fca2a0a2540..d02e78202e0c2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -56,11 +56,11 @@ public class VerifierTests extends ESTestCase { public void testIncompatibleTypesInMathOperation() { assertEquals( - "1:40: second argument of [a + c] must be [datetime or numeric], found value [c] type [keyword]", + "1:40: second argument of [a + c] must be [date_nanos, datetime or numeric], found value [c] type [keyword]", error("row a = 1, b = 2, c = \"xxx\" | eval y = a + c") ); assertEquals( - "1:40: second argument of [a - c] must be [datetime or numeric], found value [c] type [keyword]", + "1:40: second argument of [a - c] must be [date_nanos, datetime or numeric], found value [c] type [keyword]", error("row a = 1, b = 2, c = \"xxx\" | eval y = a - c") ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index d78dfd3141a04..816c9ef6f352c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1113,31 +1113,83 @@ public static List dateCases(long min, long max) { * */ public static List dateNanosCases() { - return List.of( - new TypedDataSupplier("<1970-01-01T00:00:00.000000000Z>", () -> 0L, DataType.DATE_NANOS), - new TypedDataSupplier("", () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11), DataType.DATE_NANOS), - new TypedDataSupplier( - "", - () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE), - DataType.DATE_NANOS - ), - new TypedDataSupplier( - "", - () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE), - DataType.DATE_NANOS - ) - ); + return dateNanosCases(Instant.EPOCH, DateUtils.MAX_NANOSECOND_INSTANT); + } + + /** + * Generate cases for {@link DataType#DATE_NANOS}. 
+     *
+     */
+    public static List<TypedDataSupplier> dateNanosCases(Instant minValue, Instant maxValue) {
+        // maximum nanosecond date in ES is 2262-04-11T23:47:16.854775807Z
+        Instant twentyOneHundred = Instant.parse("2100-01-01T00:00:00Z");
+        Instant twentyTwoHundred = Instant.parse("2200-01-01T00:00:00Z");
+        Instant twentyTwoFifty = Instant.parse("2250-01-01T00:00:00Z");
+
+        List<TypedDataSupplier> cases = new ArrayList<>();
+        if (minValue.isAfter(Instant.EPOCH) == false) {
+            cases.add(
+                new TypedDataSupplier("<1970-01-01T00:00:00.000000000Z>", () -> DateUtils.toLong(Instant.EPOCH), DataType.DATE_NANOS)
+            );
+        }
+
+        Instant lower = Instant.EPOCH.isBefore(minValue) ? minValue : Instant.EPOCH;
+        Instant upper = twentyOneHundred.isAfter(maxValue) ? maxValue : twentyOneHundred;
+        if (upper.isAfter(lower)) {
+            cases.add(
+                new TypedDataSupplier(
+                    "<21st century date nanos>",
+                    () -> DateUtils.toLong(ESTestCase.randomInstantBetween(lower, upper)),
+                    DataType.DATE_NANOS
+                )
+            );
+        }
+
+        Instant lower2 = twentyOneHundred.isBefore(minValue) ? minValue : twentyOneHundred;
+        Instant upper2 = twentyTwoHundred.isAfter(maxValue) ? maxValue : twentyTwoHundred;
+        if (upper2.isAfter(lower2)) {
+            cases.add(
+                new TypedDataSupplier(
+                    "<22nd century date nanos>",
+                    () -> DateUtils.toLong(ESTestCase.randomInstantBetween(lower2, upper2)),
+                    DataType.DATE_NANOS
+                )
+            );
+        }
+
+        Instant lower3 = twentyTwoHundred.isBefore(minValue) ? minValue : twentyTwoHundred;
+        Instant upper3 = twentyTwoFifty.isAfter(maxValue) ? maxValue : twentyTwoFifty;
+        if (upper3.isAfter(lower3)) {
+            cases.add(
+                new TypedDataSupplier(
+                    "<23rd century date nanos>",
+                    () -> DateUtils.toLong(ESTestCase.randomInstantBetween(lower3, upper3)),
+                    DataType.DATE_NANOS
+                )
+            );
+        }
+        return cases;
     }
 
     public static List<TypedDataSupplier> datePeriodCases() {
+        return datePeriodCases(-1000, -13, -32, 1000, 13, 32);
+    }
+
+    public static List<TypedDataSupplier> datePeriodCases(int yearMin, int monthMin, int dayMin, int yearMax, int monthMax, int dayMax) {
+        final int yMin = Math.max(yearMin, -1000);
+        final int mMin = Math.max(monthMin, -13);
+        final int dMin = Math.max(dayMin, -32);
+        final int yMax = Math.min(yearMax, 1000);
+        final int mMax = Math.min(monthMax, 13);
+        final int dMax = Math.min(dayMax, 32);
         return List.of(
             new TypedDataSupplier("", () -> Period.ZERO, DataType.DATE_PERIOD, true),
             new TypedDataSupplier(
                 "",
                 () -> Period.of(
-                    ESTestCase.randomIntBetween(-1000, 1000),
-                    ESTestCase.randomIntBetween(-13, 13),
-                    ESTestCase.randomIntBetween(-32, 32)
+                    ESTestCase.randomIntBetween(yMin, yMax),
+                    ESTestCase.randomIntBetween(mMin, mMax),
+                    ESTestCase.randomIntBetween(dMin, dMax)
                 ),
                 DataType.DATE_PERIOD,
                 true
@@ -1146,11 +1198,18 @@ public static List<TypedDataSupplier> datePeriodCases() {
     }
 
     public static List<TypedDataSupplier> timeDurationCases() {
+        return timeDurationCases(-604800000, 604800000);
+    }
+
+    public static List<TypedDataSupplier> timeDurationCases(long minValue, long maxValue) {
+        // plus/minus 7 days by default, clamped to the caller's limits
+        final long min = Math.max(minValue, -604800000L);
+        final long max = Math.min(maxValue, 604800000L);
         return List.of(
             new TypedDataSupplier("", () -> Duration.ZERO, DataType.TIME_DURATION, true),
             new TypedDataSupplier(
                 "",
-                () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days
+                () -> Duration.ofMillis(ESTestCase.randomLongBetween(min, max)),
                 DataType.TIME_DURATION,
                 true
             )
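The three century buckets in dateNanosCases above share one clamp-then-guard shape. Factored out as a hypothetical helper, shown only to make the pattern explicit (this class is not part of the patch), it reads:

    import java.time.Instant;

    class BucketClampSketch {
        // Intersect the caller's [min, max] window with a bucket [bucketStart, bucketEnd);
        // null means the bucket is empty after clamping and should yield no test case.
        static Instant[] clamp(Instant min, Instant max, Instant bucketStart, Instant bucketEnd) {
            Instant lower = bucketStart.isBefore(min) ? min : bucketStart;
            Instant upper = bucketEnd.isAfter(max) ? max : bucketEnd;
            return upper.isAfter(lower) ? new Instant[] { lower, upper } : null;
        }

        public static void main(String[] args) {
            // A 1985-2050 window intersects the epoch-to-2100 bucket...
            System.out.println(java.util.Arrays.toString(
                clamp(Instant.parse("1985-01-01T00:00:00Z"), Instant.parse("2050-01-01T00:00:00Z"),
                    Instant.EPOCH, Instant.parse("2100-01-01T00:00:00Z"))));
            // ...but clamps the 2200-2250 bucket down to an empty interval.
            System.out.println(clamp(Instant.parse("1985-01-01T00:00:00Z"), Instant.parse("2050-01-01T00:00:00Z"),
                Instant.parse("2200-01-01T00:00:00Z"), Instant.parse("2250-01-01T00:00:00Z")));
        }
    }

Each guard has to test the clamped bounds of its own bucket (upper2/lower2, upper3/lower3), which is the easy thing to get wrong when the blocks are copy-pasted.

diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java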
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 8c31b4a65dd14..abfb634d5f301 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -18,7 +19,9 @@ import java.math.BigInteger; import java.time.Duration; +import java.time.Instant; import java.time.Period; +import java.time.ZonedDateTime; import java.time.temporal.TemporalAmount; import java.util.ArrayList; import java.util.List; @@ -26,6 +29,7 @@ import java.util.function.BiFunction; import java.util.function.BinaryOperator; import java.util.function.Supplier; +import java.util.function.ToLongBiFunction; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asMillis; @@ -148,14 +152,14 @@ public static Iterable parameters() { BinaryOperator result = (lhs, rhs) -> { try { - return addDatesAndTemporalAmount(lhs, rhs); + return addDatesAndTemporalAmount(lhs, rhs, AddTests::addMillis); } catch (ArithmeticException e) { return null; } }; BiFunction> warnings = (lhs, rhs) -> { try { - addDatesAndTemporalAmount(lhs.data(), rhs.data()); + addDatesAndTemporalAmount(lhs.data(), rhs.data(), AddTests::addMillis); return List.of(); } catch (ArithmeticException e) { return List.of( @@ -186,6 +190,37 @@ public static Iterable parameters() { true ) ); + + BinaryOperator nanosResult = (lhs, rhs) -> { + try { + return addDatesAndTemporalAmount(lhs, rhs, AddTests::addNanos); + } catch (ArithmeticException e) { + return null; + } + }; + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + nanosResult, + DataType.DATE_NANOS, + TestCaseSupplier.dateNanosCases(), + TestCaseSupplier.datePeriodCases(0, 0, 0, 10, 13, 32), + startsWith("AddDateNanosEvaluator[dateNanos=Attribute[channel=0], temporalAmount="), + warnings, + true + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + nanosResult, + DataType.DATE_NANOS, + TestCaseSupplier.dateNanosCases(), + TestCaseSupplier.timeDurationCases(0, 604800000L), + startsWith("AddDateNanosEvaluator[dateNanos=Attribute[channel=0], temporalAmount="), + warnings, + true + ) + ); + suppliers.addAll(TestCaseSupplier.dateCases().stream().mapMulti((tds, consumer) -> { consumer.accept( new TestCaseSupplier( @@ -284,7 +319,7 @@ public static Iterable parameters() { private static String addErrorMessageString(boolean includeOrdinal, List> validPerPosition, List types) { try { - return typeErrorMessage(includeOrdinal, validPerPosition, types, (a, b) -> "datetime or numeric"); + return typeErrorMessage(includeOrdinal, validPerPosition, types, (a, b) -> "date_nanos, datetime or numeric"); } catch (IllegalStateException e) { // This means all the positional args were okay, so the expected error is from the combination return "[+] has arguments with incompatible types [" + types.get(0).typeName() + "] and [" + types.get(1).typeName() + "]"; @@ -292,7 +327,7 @@ private static String 
addErrorMessageString(boolean includeOrdinal, List adder) { // this weird casting dance makes the expected value lambda symmetric Long date; TemporalAmount period; @@ -303,9 +338,21 @@ private static Object addDatesAndTemporalAmount(Object lhs, Object rhs) { date = (Long) rhs; period = (TemporalAmount) lhs; } + return adder.applyAsLong(date, period); + } + + private static long addMillis(Long date, TemporalAmount period) { return asMillis(asDateTime(date).plus(period)); } + private static long addNanos(Long date, TemporalAmount period) { + return DateUtils.toLong( + Instant.from( + ZonedDateTime.ofInstant(DateUtils.toInstant(date), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).plus(period) + ) + ); + } + @Override protected Expression build(Source source, List args) { return new Add(source, args.get(0), args.get(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index 39d55d1ba0b54..1338299b3a121 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -10,16 +10,23 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matchers; import java.time.Duration; +import java.time.Instant; import java.time.Period; +import java.time.ZonedDateTime; +import java.time.temporal.TemporalAmount; import java.util.List; +import java.util.function.BinaryOperator; import java.util.function.Supplier; +import java.util.function.ToLongBiFunction; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; @@ -28,6 +35,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class SubTests extends AbstractScalarFunctionTestCase { public SubTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -117,13 +125,44 @@ public static Iterable parameters() { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(lhs, DataType.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataType.DATE_PERIOD, "rhs") + new TestCaseSupplier.TypedData(rhs, DataType.DATE_PERIOD, "rhs").forceLiteral() ), - "SubDatetimesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + Matchers.startsWith("SubDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), DataType.DATETIME, equalTo(asMillis(asDateTime(lhs).minus(rhs))) ); })); + + BinaryOperator nanosResult = (lhs, rhs) -> { + try { + return subtractDatesAndTemporalAmount(lhs, rhs, SubTests::subtractNanos); + } catch (ArithmeticException e) { + return null; + } + }; + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + nanosResult, 
+ DataType.DATE_NANOS, + TestCaseSupplier.dateNanosCases(Instant.parse("1985-01-01T00:00:00Z"), DateUtils.MAX_NANOSECOND_INSTANT), + TestCaseSupplier.datePeriodCases(0, 0, 0, 10, 13, 32), + startsWith("SubDateNanosEvaluator[dateNanos=Attribute[channel=0], temporalAmount="), + (l, r) -> List.of(), + true + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + nanosResult, + DataType.DATE_NANOS, + TestCaseSupplier.dateNanosCases(Instant.parse("1985-01-01T00:00:00Z"), DateUtils.MAX_NANOSECOND_INSTANT), + TestCaseSupplier.timeDurationCases(0, 604800000L), + startsWith("SubDateNanosEvaluator[dateNanos=Attribute[channel=0], temporalAmount="), + (l, r) -> List.of(), + true + ) + ); + suppliers.add(new TestCaseSupplier("Period - Period", List.of(DataType.DATE_PERIOD, DataType.DATE_PERIOD), () -> { Period lhs = (Period) randomLiteral(DataType.DATE_PERIOD).value(); Period rhs = (Period) randomLiteral(DataType.DATE_PERIOD).value(); @@ -143,9 +182,9 @@ public static Iterable parameters() { TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(lhs, DataType.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataType.TIME_DURATION, "rhs") + new TestCaseSupplier.TypedData(rhs, DataType.TIME_DURATION, "rhs").forceLiteral() ), - "SubDatetimesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + Matchers.startsWith("SubDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), DataType.DATETIME, equalTo(asMillis(asDateTime(lhs).minus(rhs))) ); @@ -164,6 +203,7 @@ public static Iterable parameters() { equalTo(lhs.minus(rhs)) ); })); + // exact math arithmetic exceptions suppliers.add( arithmeticExceptionOverflowCase( @@ -210,7 +250,7 @@ public static Iterable parameters() { return original.getData().get(nullPosition == 0 ? 1 : 0).type(); } return original.expectedType(); - }, (nullPosition, nullData, original) -> original); + }, (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
equalTo("LiteralsEvaluator[lit=null]") : original); suppliers.add(new TestCaseSupplier("MV", List.of(DataType.INTEGER, DataType.INTEGER), () -> { // Ensure we don't have an overflow @@ -236,4 +276,26 @@ public static Iterable parameters() { protected Expression build(Source source, List args) { return new Sub(source, args.get(0), args.get(1)); } + + private static Object subtractDatesAndTemporalAmount(Object lhs, Object rhs, ToLongBiFunction subtract) { + // this weird casting dance makes the expected value lambda symmetric + Long date; + TemporalAmount period; + if (lhs instanceof Long) { + date = (Long) lhs; + period = (TemporalAmount) rhs; + } else { + date = (Long) rhs; + period = (TemporalAmount) lhs; + } + return subtract.applyAsLong(date, period); + } + + private static long subtractNanos(Long date, TemporalAmount period) { + return DateUtils.toLong( + Instant.from( + ZonedDateTime.ofInstant(DateUtils.toInstant(date), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).minus(period) + ) + ); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java index b30f0870496e3..8a57dfa968ccd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverterTests.java @@ -43,7 +43,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTime; -import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrTemporal; +import static org.elasticsearch.xpack.esql.core.type.DataType.isDateTimeOrNanosOrTemporal; import static org.elasticsearch.xpack.esql.core.type.DataType.isString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; @@ -80,14 +80,18 @@ public void testCommonTypeStrings() { } public void testCommonTypeDateTimeIntervals() { - List DATE_TIME_INTERVALS = Arrays.stream(DataType.values()).filter(DataType::isDateTimeOrTemporal).toList(); + List DATE_TIME_INTERVALS = Arrays.stream(DataType.values()).filter(DataType::isDateTimeOrNanosOrTemporal).toList(); for (DataType dataType1 : DATE_TIME_INTERVALS) { for (DataType dataType2 : DataType.values()) { if (dataType2 == NULL) { assertEqualsCommonType(dataType1, NULL, dataType1); - } else if (isDateTimeOrTemporal(dataType2)) { - if (isDateTime(dataType1) || isDateTime(dataType2)) { + } else if (isDateTimeOrNanosOrTemporal(dataType2)) { + if ((dataType1 == DATE_NANOS && dataType2 == DATETIME) || (dataType1 == DATETIME && dataType2 == DATE_NANOS)) { + assertNullCommonType(dataType1, dataType2); + } else if (isDateTime(dataType1) || isDateTime(dataType2)) { assertEqualsCommonType(dataType1, dataType2, DATETIME); + } else if (dataType1 == DATE_NANOS || dataType2 == DATE_NANOS) { + assertEqualsCommonType(dataType1, dataType2, DATE_NANOS); } else if (dataType1 == dataType2) { assertEqualsCommonType(dataType1, dataType2, dataType1); } else { @@ -141,7 +145,6 @@ public void testCommonTypeMiscellaneous() { UNSUPPORTED, OBJECT, SOURCE, - DATE_NANOS, DOC_DATA_TYPE, TSID_DATA_TYPE, PARTIAL_AGG, @@ -165,12 +168,12 @@ public void testCommonTypeMiscellaneous() { } private static void assertEqualsCommonType(DataType dataType1, DataType dataType2, DataType 
commonType) { - assertEquals(commonType, commonType(dataType1, dataType2)); - assertEquals(commonType, commonType(dataType2, dataType1)); + assertEquals("Expected " + commonType + " for " + dataType1 + " and " + dataType2, commonType, commonType(dataType1, dataType2)); + assertEquals("Expected " + commonType + " for " + dataType1 + " and " + dataType2, commonType, commonType(dataType2, dataType1)); } private static void assertNullCommonType(DataType dataType1, DataType dataType2) { - assertNull(commonType(dataType1, dataType2)); - assertNull(commonType(dataType2, dataType1)); + assertNull("Expected null for " + dataType1 + " and " + dataType2, commonType(dataType1, dataType2)); + assertNull("Expected null for " + dataType1 + " and " + dataType2, commonType(dataType2, dataType1)); } } From ae3d0b9e609f757a365dd61bf02dc477c2bcba83 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 2 Dec 2024 14:19:41 -0500 Subject: [PATCH 351/386] ESQL: Limit size of `Literal#toString` (#117842) This `toString` is rendered in task output and progress. Let's make sure it's not massive. --- docs/changelog/117842.yaml | 5 ++ .../xpack/esql/heap_attack/HeapAttackIT.java | 85 ++++++++++++++++--- .../xpack/esql/core/expression/Literal.java | 6 +- .../esql/core/expression/LiteralTests.java | 20 +++++ 4 files changed, 103 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/117842.yaml diff --git a/docs/changelog/117842.yaml b/docs/changelog/117842.yaml new file mode 100644 index 0000000000000..9b528a158288c --- /dev/null +++ b/docs/changelog/117842.yaml @@ -0,0 +1,5 @@ +pr: 117842 +summary: Limit size of `Literal#toString` +area: ES|QL +type: bug +issues: [] diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 008a056e87901..8b9176a346e30 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -295,15 +295,10 @@ private Response concat(int evals) throws IOException { * Returns many moderately long strings. */ public void testManyConcat() throws IOException { + int strings = 300; initManyLongs(); - Response resp = manyConcat(300); - Map map = responseAsMap(resp); - ListMatcher columns = matchesList(); - for (int s = 0; s < 300; s++) { - columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword")); - } - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", any(List.class)).entry("took", greaterThanOrEqualTo(0))); + Response resp = manyConcat("FROM manylongs", strings); + assertManyStrings(resp, strings); } /** @@ -311,15 +306,24 @@ public void testManyConcat() throws IOException { */ public void testHugeManyConcat() throws IOException { initManyLongs(); - assertCircuitBreaks(() -> manyConcat(2000)); + assertCircuitBreaks(() -> manyConcat("FROM manylongs", 2000)); + } + + /** + * Returns many moderately long strings. + */ + public void testManyConcatFromRow() throws IOException { + int strings = 2000; + Response resp = manyConcat("ROW a=9999, b=9999, c=9999, d=9999, e=9999", strings); + assertManyStrings(resp, strings); } /** * Tests that generate many moderately long strings. 
*/ - private Response manyConcat(int strings) throws IOException { + private Response manyConcat(String init, int strings) throws IOException { StringBuilder query = startQuery(); - query.append("FROM manylongs | EVAL str = CONCAT("); + query.append(init).append(" | EVAL str = CONCAT("); query.append( Arrays.stream(new String[] { "a", "b", "c", "d", "e" }) .map(f -> "TO_STRING(" + f + ")") @@ -344,7 +348,64 @@ private Response manyConcat(int strings) throws IOException { query.append("str").append(s); } query.append("\"}"); - return query(query.toString(), null); + return query(query.toString(), "columns"); + } + + /** + * Returns many moderately long strings. + */ + public void testManyRepeat() throws IOException { + int strings = 30; + initManyLongs(); + Response resp = manyRepeat("FROM manylongs", strings); + assertManyStrings(resp, 30); + } + + /** + * Hits a circuit breaker by building many moderately long strings. + */ + public void testHugeManyRepeat() throws IOException { + initManyLongs(); + assertCircuitBreaks(() -> manyRepeat("FROM manylongs", 75)); + } + + /** + * Returns many moderately long strings. + */ + public void testManyRepeatFromRow() throws IOException { + int strings = 10000; + Response resp = manyRepeat("ROW a = 99", strings); + assertManyStrings(resp, strings); + } + + /** + * Tests that generate many moderately long strings. + */ + private Response manyRepeat(String init, int strings) throws IOException { + StringBuilder query = startQuery(); + query.append(init).append(" | EVAL str = TO_STRING(a)"); + for (int s = 0; s < strings; s++) { + query.append(",\nstr").append(s).append("=REPEAT(str, 10000)"); + } + query.append("\n|KEEP "); + for (int s = 0; s < strings; s++) { + if (s != 0) { + query.append(", "); + } + query.append("str").append(s); + } + query.append("\"}"); + return query(query.toString(), "columns"); + } + + private void assertManyStrings(Response resp, int strings) throws IOException { + Map map = responseAsMap(resp); + ListMatcher columns = matchesList(); + for (int s = 0; s < strings; s++) { + columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword")); + } + MapMatcher mapMatcher = matchesMap(); + assertMap(map, mapMatcher.entry("columns", columns)); } public void testManyEval() throws IOException { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java index 20cdbaf6acdbf..53f559c5c82fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Literal.java @@ -122,7 +122,11 @@ public boolean equals(Object obj) { @Override public String toString() { - return String.valueOf(value); + String str = String.valueOf(value); + if (str.length() > 500) { + return str.substring(0, 500) + "..."; + } + return str; } @Override diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java index a4c67a8076479..a628916e67746 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/LiteralTests.java @@ -6,9 +6,12 @@ */ package 
org.elasticsearch.xpack.esql.core.expression;
 
+import joptsimple.internal.Strings;
+
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
 import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.tree.SourceTests;
 import org.elasticsearch.xpack.esql.core.type.Converter;
 import org.elasticsearch.xpack.esql.core.type.DataType;
@@ -17,6 +20,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Objects;
 import java.util.function.Function;
 import java.util.function.Supplier;
 
@@ -29,9 +33,12 @@
 import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
 import static org.elasticsearch.xpack.esql.core.type.DataType.LONG;
 import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT;
+import static org.hamcrest.Matchers.equalTo;
 
 public class LiteralTests extends AbstractNodeTestCase<Literal, Literal> {
+
     static class ValueAndCompatibleTypes {
+
         final Supplier<Object> valueSupplier;
         final List<DataType> validDataTypes;
@@ -120,6 +127,19 @@ public void testReplaceChildren() {
         assertEquals("this type of node doesn't have any children to replace", e.getMessage());
     }
 
+    public void testToString() {
+        assertThat(new Literal(Source.EMPTY, 1, LONG).toString(), equalTo("1"));
+        assertThat(new Literal(Source.EMPTY, "short", KEYWORD).toString(), equalTo("short"));
+        // toString should limit its length
+        String tooLong = Strings.repeat('a', 510);
+        assertThat(new Literal(Source.EMPTY, tooLong, KEYWORD).toString(), equalTo(Strings.repeat('a', 500) + "..."));
+
+        for (ValueAndCompatibleTypes g : GENERATORS) {
+            Literal lit = new Literal(Source.EMPTY, g.valueSupplier.get(), randomFrom(g.validDataTypes));
+            assertThat(lit.toString(), equalTo(Objects.toString(lit.value())));
+        }
+    }
+
     private static Object randomValueOfTypeOtherThan(Object original, DataType type) {
         for (ValueAndCompatibleTypes gen : GENERATORS) {
             if (gen.validDataTypes.get(0) == type) {

From 3b3be18af4d4e7457c6557589af190e5932f06dd Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Mon, 2 Dec 2024 20:42:07 +0100
Subject: [PATCH 352/386] Add javadocs for Lucene 7 codec classes (#117819)

---
 .../lucene/bwc/codecs/lucene70/BWCLucene70Codec.java    | 8 ++++++++
 .../xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java | 8 ++++++++
 2 files changed, 16 insertions(+)

diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java
index 0100a8bd14635..5a49a7a415b9c 100644
--- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java
+++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java
@@ -25,6 +25,12 @@
 import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec;
 import org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60MetadataOnlyPointsFormat;
 
+/**
+ * Implements the Lucene 7.0 index format. Loaded via SPI for indices created/written with Lucene 7.x (Elasticsearch 6.x) mounted
+ * as archive indices first in Elasticsearch 8.x. Lucene 9.12 retained Lucene70Codec in its classpath, which required overriding the
+ * codec name and version in the segment infos.
+ * This codec is still needed after upgrading to Elasticsearch 9.x because its codec name has been written to disk.
+ */
 public class BWCLucene70Codec extends BWCCodec {
 
     private final FieldInfosFormat fieldInfosFormat = wrap(new Lucene60FieldInfosFormat());
@@ -46,6 +52,8 @@ public PostingsFormat getPostingsFormatForField(String field) {
         }
     };
 
+    // Needed for SPI loading
+    @SuppressWarnings("unused")
     public BWCLucene70Codec() {
         this("BWCLucene70Codec");
     }
diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java
index 77de24b53069d..f9ba02676c2d0 100644
--- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java
+++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java
@@ -7,6 +7,14 @@
 
 package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70;
 
+/**
+ * Implements the Lucene 7.0 index format. Will be loaded via SPI for indices created/written with Lucene 7.x (Elasticsearch 6.x)
+ * mounted as archive indices in Elasticsearch 9.x. Note that for indices with the same version mounted first as archive indices in
+ * Elasticsearch 8.x, {@link BWCLucene70Codec} will be used instead; it provides the same functionality, only registered under a
+ * different name.
+ *
+ * @deprecated Only for 7.0 back compat
+ */
+@Deprecated
 public class Lucene70Codec extends BWCLucene70Codec {
 
     public Lucene70Codec() {

From 97a626b5ea9cb9a7aca5f83ac6395b3b0ad1dbf2 Mon Sep 17 00:00:00 2001
From: Marci W <333176+marciw@users.noreply.github.com>
Date: Mon, 2 Dec 2024 14:46:41 -0500
Subject: [PATCH 353/386] Remove ccs banner (#117844)

---
 docs/reference/esql/esql-across-clusters.asciidoc | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc
index db266fafde9d6..6decc351bc1c8 100644
--- a/docs/reference/esql/esql-across-clusters.asciidoc
+++ b/docs/reference/esql/esql-across-clusters.asciidoc
@@ -8,11 +8,6 @@
 preview::["{ccs-cap} for {esql} is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."]
 
-[NOTE]
-====
-For {ccs-cap} with {esql} on version 8.16 or later, remote clusters must also be on version 8.16 or later.
-====
-
 With {esql}, you can execute a single query across multiple clusters.
 
 [discrete]

From c54d4b687f3658fadcb158dbe43befa1edcb0e38 Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Mon, 2 Dec 2024 21:17:19 +0100
Subject: [PATCH 354/386] Don't skip shards in coord rewrite if timestamp is an alias (#117271)

The coordinator rewrite has logic to skip indices if the provided date range filter does not overlap the min and max range of all of its shards. This mechanism is enabled for the event.ingested and @timestamp fields, against searchable snapshots.

We have basic checks that such fields need to be of date field type; however, if they are defined as an alias of a date field, their reported range is EMPTY, which normally signals that no document has a value for the field. The coord rewrite logic resolves the alias and ends up skipping shards that may have matching docs.

This commit adds an explicit check that declares the range UNKNOWN instead of EMPTY in these circumstances.
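To make the mechanism concrete, the coordinator-side decision looks roughly like the following sketch. It is an illustrative condensation written against the real IndexLongFieldRange constants, not the actual RangeQueryBuilder.getRelation implementation:

    // Sketch only: how a reported IndexLongFieldRange drives the can-match decision.
    static boolean canSkipIndex(IndexLongFieldRange range, long from, long to) {
        if (range == IndexLongFieldRange.UNKNOWN || range.isComplete() == false) {
            return false; // nothing can be concluded, the index must be searched
        }
        if (range == IndexLongFieldRange.EMPTY) {
            return true; // claims that no document has a value for the field
        }
        return range.getMax() < from || range.getMin() > to; // skip only disjoint ranges
    }

An alias used to land in the EMPTY branch because the range was computed for the alias name, which no document actually carries; reporting UNKNOWN instead keeps such indices in the search.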
The same check is also performed in the coord rewrite logic, so that shards are no longer skipped by mistake. --- docs/changelog/117271.yaml | 5 + .../index/query/RangeQueryBuilder.java | 1 + .../elasticsearch/index/shard/IndexShard.java | 4 +- .../indices/TimestampFieldMapperService.java | 6 +- ...pshotsCanMatchOnCoordinatorIntegTests.java | 124 +++++++++++++++++- 5 files changed, 130 insertions(+), 10 deletions(-) create mode 100644 docs/changelog/117271.yaml diff --git a/docs/changelog/117271.yaml b/docs/changelog/117271.yaml new file mode 100644 index 0000000000000..1a328279b9635 --- /dev/null +++ b/docs/changelog/117271.yaml @@ -0,0 +1,5 @@ +pr: 117271 +summary: Don't skip shards in coord rewrite if timestamp is an alias +area: Search +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 9f6a2be8cdbc7..d6dad15abb8e6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -426,6 +426,7 @@ public String getWriteableName() { protected MappedFieldType.Relation getRelation(final CoordinatorRewriteContext coordinatorRewriteContext) { final MappedFieldType fieldType = coordinatorRewriteContext.getFieldType(fieldName); if (fieldType instanceof final DateFieldMapper.DateFieldType dateFieldType) { + assert fieldName.equals(fieldType.name()); IndexLongFieldRange fieldRange = coordinatorRewriteContext.getFieldRange(fieldName); if (fieldRange.isComplete() == false || fieldRange == IndexLongFieldRange.EMPTY) { // if not all shards for this (frozen) index have reported ranges to cluster state, OR if they diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ee24b8d9a9e91..993079a3106d7 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -2274,8 +2274,8 @@ private ShardLongFieldRange determineShardLongFieldRange(String fieldName) { return ShardLongFieldRange.UNKNOWN; // no mapper service, no idea if the field even exists } final MappedFieldType mappedFieldType = mapperService().fieldType(fieldName); - if (mappedFieldType instanceof DateFieldMapper.DateFieldType == false) { - return ShardLongFieldRange.UNKNOWN; // field missing or not a date + if (mappedFieldType instanceof DateFieldMapper.DateFieldType == false || mappedFieldType.name().equals(fieldName) == false) { + return ShardLongFieldRange.UNKNOWN; // field is missing, an alias (as the field type has a different name) or not a date field } if (mappedFieldType.isIndexed() == false) { return ShardLongFieldRange.UNKNOWN; // range information missing diff --git a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java index 026766671e5aa..158cc1f44b608 100644 --- a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java +++ b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java @@ -166,11 +166,13 @@ private static DateFieldRangeInfo fromMapperService(MapperService mapperService) DateFieldMapper.DateFieldType eventIngestedFieldType = null; MappedFieldType mappedFieldType = mapperService.fieldType(DataStream.TIMESTAMP_FIELD_NAME); - if 
(mappedFieldType instanceof DateFieldMapper.DateFieldType dateFieldType) { + if (mappedFieldType instanceof DateFieldMapper.DateFieldType dateFieldType + && dateFieldType.name().equals(DataStream.TIMESTAMP_FIELD_NAME)) { timestampFieldType = dateFieldType; } mappedFieldType = mapperService.fieldType(IndexMetadata.EVENT_INGESTED_FIELD_NAME); - if (mappedFieldType instanceof DateFieldMapper.DateFieldType dateFieldType) { + if (mappedFieldType instanceof DateFieldMapper.DateFieldType dateFieldType + && dateFieldType.name().equals(IndexMetadata.EVENT_INGESTED_FIELD_NAME)) { eventIngestedFieldType = dateFieldType; } if (timestampFieldType == null && eventIngestedFieldType == null) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index 26764592d5f72..21b24db6ce8d5 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.search.SearchShardsGroup; import org.elasticsearch.action.search.SearchShardsRequest; import org.elasticsearch.action.search.SearchShardsResponse; +import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.TransportSearchShardsAction; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.cluster.metadata.DataStream; @@ -1096,6 +1097,119 @@ public void testCanMatchSkipsPartiallyMountedIndicesWhenFrozenNodesUnavailable() } } + public void testTimestampAsAlias() throws Exception { + doTestCoordRewriteWithAliasField("@timestamp"); + } + + public void testEventIngestedAsAlias() throws Exception { + doTestCoordRewriteWithAliasField("event.ingested"); + } + + private void doTestCoordRewriteWithAliasField(String aliasFieldName) throws Exception { + internalCluster().startMasterOnlyNode(); + internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); + final String dataNodeHoldingRegularIndex = internalCluster().startDataOnlyNode(); + final String dataNodeHoldingSearchableSnapshot = internalCluster().startDataOnlyNode(); + + String timestampFieldName = randomAlphaOfLengthBetween(3, 10); + String[] indices = new String[] { "index-0001", "index-0002" }; + for (String index : indices) { + Settings extraSettings = Settings.builder() + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingRegularIndex) + .build(); + + assertAcked( + indicesAdmin().prepareCreate(index) + .setMapping( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + + .startObject(timestampFieldName) + .field("type", "date") + .endObject() + + .startObject(aliasFieldName) + .field("type", "alias") + .field("path", timestampFieldName) + .endObject() + + .endObject() + .endObject() + ) + .setSettings(indexSettingsNoReplicas(1).put(INDEX_SOFT_DELETES_SETTING.getKey(), true).put(extraSettings)) + ); + } + ensureGreen(indices); + + for (String index : indices) { + final List indexRequestBuilders = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + 
indexRequestBuilders.add(prepareIndex(index).setSource(timestampFieldName, "2024-11-19T08:08:08Z")); + } + indexRandom(true, false, indexRequestBuilders); + + assertThat( + indicesAdmin().prepareForceMerge(index).setOnlyExpungeDeletes(true).setFlush(true).get().getFailedShards(), + equalTo(0) + ); + refresh(index); + forceMerge(); + } + + final String repositoryName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createRepository(repositoryName, "mock"); + + final SnapshotId snapshotId = createSnapshot(repositoryName, "snapshot-1", List.of(indices[0])).snapshotId(); + assertAcked(indicesAdmin().prepareDelete(indices[0])); + + // Block the repository for the node holding the searchable snapshot shards + // to delay its restore + blockDataNode(repositoryName, dataNodeHoldingSearchableSnapshot); + + // Force the searchable snapshot to be allocated in a particular node + Settings restoredIndexSettings = Settings.builder() + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getConcreteSettingForNamespace("_name").getKey(), dataNodeHoldingSearchableSnapshot) + .build(); + + String mountedIndex = indices[0] + "-mounted"; + final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest( + TEST_REQUEST_TIMEOUT, + mountedIndex, + repositoryName, + snapshotId.getName(), + indices[0], + restoredIndexSettings, + Strings.EMPTY_ARRAY, + false, + randomFrom(MountSearchableSnapshotRequest.Storage.values()) + ); + client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet(); + + // Allow the searchable snapshots to be finally mounted + unblockNode(repositoryName, dataNodeHoldingSearchableSnapshot); + waitUntilRecoveryIsDone(mountedIndex); + ensureGreen(mountedIndex); + + String[] fieldsToQuery = new String[] { timestampFieldName, aliasFieldName }; + for (String fieldName : fieldsToQuery) { + RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(fieldName).from("2024-11-01T00:00:00.000000000Z", true); + SearchRequest request = new SearchRequest().searchType(SearchType.QUERY_THEN_FETCH) + .source(new SearchSourceBuilder().query(rangeQuery)); + if (randomBoolean()) { + // pre_filter_shard_size default to 1 because there are read-only indices in the mix. It does not hurt to force it though. 
+ request.setPreFilterShardSize(1); + } + assertResponse(client().search(request), searchResponse -> { + assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); + assertThat(searchResponse.getFailedShards(), equalTo(0)); + assertThat(searchResponse.getSkippedShards(), equalTo(0)); + assertThat(searchResponse.getTotalShards(), equalTo(2)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(20L)); + }); + } + } + private void createIndexWithTimestampAndEventIngested(String indexName, int numShards, Settings extraSettings) throws IOException { assertAcked( indicesAdmin().prepareCreate(indexName) @@ -1144,8 +1258,7 @@ private void createIndexWithOnlyOneTimestampField(String timestampField, String ensureGreen(index); } - private void indexDocumentsWithOnlyOneTimestampField(String timestampField, String index, int docCount, String timestampTemplate) - throws Exception { + private void indexDocumentsWithOnlyOneTimestampField(String timestampField, String index, int docCount, String timestampTemplate) { final List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < docCount; i++) { indexRequestBuilders.add( @@ -1169,8 +1282,7 @@ private void indexDocumentsWithOnlyOneTimestampField(String timestampField, Stri forceMerge(); } - private void indexDocumentsWithTimestampAndEventIngestedDates(String indexName, int docCount, String timestampTemplate) - throws Exception { + private void indexDocumentsWithTimestampAndEventIngestedDates(String indexName, int docCount, String timestampTemplate) { final List indexRequestBuilders = new ArrayList<>(); for (int i = 0; i < docCount; i++) { @@ -1207,7 +1319,7 @@ private void indexDocumentsWithTimestampAndEventIngestedDates(String indexName, forceMerge(); } - private IndexMetadata getIndexMetadata(String indexName) { + private static IndexMetadata getIndexMetadata(String indexName) { return clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT) .clear() .setMetadata(true) @@ -1218,7 +1330,7 @@ private IndexMetadata getIndexMetadata(String indexName) { .index(indexName); } - private void waitUntilRecoveryIsDone(String index) throws Exception { + private static void waitUntilRecoveryIsDone(String index) throws Exception { assertBusy(() -> { RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(index).get(); assertThat(recoveryResponse.hasRecoveries(), equalTo(true)); From 6c2f6071b20633fafc383212331f79146613011b Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 2 Dec 2024 16:04:31 -0500 Subject: [PATCH 355/386] Refactor/bbq format (#117847) * Refactor bbq format to be contained in a package * fixing license headers * fixing module * fix style --- server/src/main/java/module-info.java | 4 ++-- .../{ => es816}/BinarizedByteVectorValues.java | 3 ++- .../codec/vectors/{ => es816}/BinaryQuantizer.java | 4 +++- .../{ => es816}/ES816BinaryFlatVectorsScorer.java | 14 ++++++++------ .../ES816BinaryQuantizedVectorsFormat.java | 2 +- .../ES816BinaryQuantizedVectorsReader.java | 7 ++++--- .../ES816BinaryQuantizedVectorsWriter.java | 10 ++++++---- .../ES816HnswBinaryQuantizedVectorsFormat.java | 2 +- .../{ => es816}/OffHeapBinarizedVectorValues.java | 9 +++++---- .../mapper/vectors/DenseVectorFieldMapper.java | 4 ++-- .../org.apache.lucene.codecs.KnnVectorsFormat | 4 ++-- .../{ => es816}/BinaryQuantizationTests.java | 4 +++- .../ES816BinaryFlatVectorsScorerTests.java | 4 +++- .../ES816BinaryQuantizedVectorsFormatTests.java | 3 ++- ...ES816HnswBinaryQuantizedVectorsFormatTests.java | 2 +- 15 files changed, 45 insertions(+), 31 
deletions(-) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/BinarizedByteVectorValues.java (96%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/BinaryQuantizer.java (98%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryFlatVectorsScorer.java (95%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryQuantizedVectorsFormat.java (98%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryQuantizedVectorsReader.java (98%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryQuantizedVectorsWriter.java (98%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816HnswBinaryQuantizedVectorsFormat.java (99%) rename server/src/main/java/org/elasticsearch/index/codec/vectors/{ => es816}/OffHeapBinarizedVectorValues.java (97%) rename server/src/test/java/org/elasticsearch/index/codec/vectors/{ => es816}/BinaryQuantizationTests.java (99%) rename server/src/test/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryFlatVectorsScorerTests.java (99%) rename server/src/test/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816BinaryQuantizedVectorsFormatTests.java (98%) rename server/src/test/java/org/elasticsearch/index/codec/vectors/{ => es816}/ES816HnswBinaryQuantizedVectorsFormatTests.java (99%) diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index d572d3b90fec8..5acc202ebb294 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -457,8 +457,8 @@ org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat, org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat, org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat, - org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat, - org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat; + org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat, + org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormat; provides org.apache.lucene.codecs.Codec with diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinarizedByteVectorValues.java similarity index 96% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinarizedByteVectorValues.java index cf69ab0862949..d5f968af3e738 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinarizedByteVectorValues.java @@ -17,11 +17,12 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizer.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizer.java index aa72904fe1341..768c6d526e468 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizer.java @@ -17,11 +17,13 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.codec.vectors.BQSpaceUtils; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import static org.apache.lucene.index.VectorSimilarityFunction.COSINE; import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorer.java similarity index 95% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorer.java index 72c5da4880e75..445bdadab2354 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorer.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.index.KnnVectorValues; @@ -26,6 +26,8 @@ import org.apache.lucene.util.VectorUtil; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.elasticsearch.index.codec.vectors.BQSpaceUtils; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import org.elasticsearch.simdvec.ESVectorUtil; import java.io.IOException; @@ -35,10 +37,10 @@ import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; /** Vector scorer over binarized vector values */ -public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { +class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { private final FlatVectorsScorer nonQuantizedDelegate; - public ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { + ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { this.nonQuantizedDelegate = nonQuantizedDelegate; } @@ -144,10 +146,10 @@ public RandomVectorScorerSupplier copy() throws IOException { } /** A binarized query representing its quantized form along with factors */ - public record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors factors) {} + record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors factors) {} /** Vector scorer over binarized vector values */ - public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { + static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final BinaryQueryVector queryVector; private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; @@ -155,7 +157,7 @@ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.Abstr private final float sqrtDimensions; private final float maxX1; - public BinarizedRandomVectorScorer( + BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormat.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormat.java index e32aea0fb04ae..d864ec5dee8c5 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormat.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.hnsw.FlatVectorScorerUtil; import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsReader.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsReader.java index 21c4a5c449387..fc20809ea7eed 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsReader.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.hnsw.FlatVectorsReader; @@ -43,6 +43,7 @@ import org.apache.lucene.util.SuppressForbidden; import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.IOException; import java.util.HashMap; @@ -55,7 +56,7 @@ * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ @SuppressForbidden(reason = "Lucene classes") -public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { +class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ES816BinaryQuantizedVectorsReader.class); @@ -64,7 +65,7 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { private final FlatVectorsReader rawVectorsReader; private final ES816BinaryFlatVectorsScorer vectorScorer; - public ES816BinaryQuantizedVectorsReader( + ES816BinaryQuantizedVectorsReader( SegmentReadState state, FlatVectorsReader rawVectorsReader, ES816BinaryFlatVectorsScorer vectorsScorer diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java similarity index 98% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java index a7774b850b64c..31ae977e81118 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsWriter.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.KnnVectorsReader; @@ -48,6 +48,8 @@ import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.index.codec.vectors.BQSpaceUtils; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.Closeable; import java.io.IOException; @@ -61,14 +63,14 @@ import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; import static org.apache.lucene.util.RamUsageEstimator.shallowSizeOfInstance; -import static org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat.BINARIZED_VECTOR_COMPONENT; -import static org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat.BINARIZED_VECTOR_COMPONENT; +import static org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ @SuppressForbidden(reason = "Lucene classes") -public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { +class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { private static final long SHALLOW_RAM_BYTES_USED = shallowSizeOfInstance(ES816BinaryQuantizedVectorsWriter.class); private final SegmentWriteState segmentWriteState; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormat.java similarity index 99% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormat.java index 097cdffff6ae4..52f9f14b7bf97 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormat.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.KnnVectorsReader; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/OffHeapBinarizedVectorValues.java similarity index 97% rename from server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java rename to server/src/main/java/org/elasticsearch/index/codec/vectors/es816/OffHeapBinarizedVectorValues.java index e7d818bb752d6..12bf962d314bd 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/es816/OffHeapBinarizedVectorValues.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.lucene90.IndexedDISI; @@ -29,6 +29,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.packed.DirectMonotonicReader; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.IOException; import java.nio.ByteBuffer; @@ -37,7 +38,7 @@ import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; /** Binarized vector values loaded from off-heap */ -public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { +abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { protected final int dimension; protected final int size; @@ -251,8 +252,8 @@ public static OffHeapBinarizedVectorValues load( } /** Dense off-heap binarized vector values */ - public static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { - public DenseOffHeapVectorValues( + static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { + DenseOffHeapVectorValues( int dimension, int size, float[] centroid, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index dea9368a9377e..0a6a24f727572 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -46,8 +46,8 @@ import org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat; import org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat; import org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat; -import org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat; -import org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat; +import org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat; +import org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.ArraySourceValueFetcher; diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat index c2201f5b1c319..389555e60b43b 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat @@ -3,5 +3,5 @@ org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat -org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat -org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat +org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat +org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormat diff --git 
a/server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizationTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java rename to server/src/test/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizationTests.java index 32d717bd76f91..205cbb4119dd6 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/BinaryQuantizationTests.java @@ -17,11 +17,13 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.codec.vectors.BQSpaceUtils; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.util.Random; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorerTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java rename to server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorerTests.java index cef5e5358f3d5..a75b9bc6064d1 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryFlatVectorsScorerTests.java @@ -17,13 +17,15 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.index.codec.vectors.BQSpaceUtils; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java rename to server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java index 42f2fbb383ac9..681f615653d40 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816BinaryQuantizedVectorsFormatTests.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. 
*/ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; @@ -41,6 +41,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.index.codec.vectors.BQVectorUtils; import java.io.IOException; import java.util.Locale; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java rename to server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java index ca96e093b7b28..a25fa2836ee34 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2024 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.vectors; +package org.elasticsearch.index.codec.vectors.es816; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; From 12be8203d3efd1ed62a838aaa1b379c592a7aaec Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 3 Dec 2024 09:31:51 +1100 Subject: [PATCH 356/386] Mute org.elasticsearch.xpack.test.rest.XPackRestIT test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} #117862 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 73d9a29e275b3..57db22feba059 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -236,6 +236,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117815 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} + issue: https://github.com/elastic/elasticsearch/issues/117862 # Examples: # From af7d3f911fbacaa1f4b1be68398cc59cbfdc89e2 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 2 Dec 2024 17:57:02 -0800 Subject: [PATCH 357/386] Add cluster level reduction (#117731) This change introduces cluster-level reduction. Unlike data-node-level reduction, it does not require pragmas because the network latency and throughput across clusters differ significantly from those within a cluster. As a result, the benefits of this reduction should outweigh the risks. 
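As a rough, self-contained illustration of what this buys for a query like `FROM *:events | STATS total = SUM(x)` (the class and method names below are invented for the sketch and are not the ES|QL planner API): each remote cluster folds its data-node pages into one intermediate value, so a single small page crosses the cluster boundary instead of many raw ones, at the cost of some CPU on the remote coordinator.

```java
import java.util.List;

class ClusterReductionSketch {
    // Each remote cluster folds the pages produced by its own data nodes into a single
    // intermediate state before anything crosses the cluster boundary.
    static long clusterLevelReduce(List<long[]> dataNodePages) {
        long partial = 0;
        for (long[] page : dataNodePages) {
            for (long value : page) {
                partial += value;
            }
        }
        return partial; // one small intermediate value instead of many raw pages
    }

    public static void main(String[] args) {
        long remoteA = clusterLevelReduce(List.of(new long[] { 1, 2 }, new long[] { 3 }));
        long remoteB = clusterLevelReduce(List.of(new long[] { 10, 20 }));
        // The final reduction happens once, on the querying cluster.
        System.out.println(remoteA + remoteB); // 36
    }
}
```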
--- docs/changelog/117731.yaml | 5 ++ .../action/CrossClustersCancellationIT.java | 37 ++++++++++++ .../xpack/esql/planner/PlannerUtils.java | 47 ++++++---------- .../xpack/esql/plugin/ComputeService.java | 56 +++++++------------ 4 files changed, 78 insertions(+), 67 deletions(-) create mode 100644 docs/changelog/117731.yaml diff --git a/docs/changelog/117731.yaml b/docs/changelog/117731.yaml new file mode 100644 index 0000000000000..f69cd5bf31100 --- /dev/null +++ b/docs/changelog/117731.yaml @@ -0,0 +1,5 @@ +pr: 117731 +summary: Add cluster level reduction +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 5ffc92636b272..f29f79976dc0d 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -238,4 +238,41 @@ public void testSameRemoteClusters() throws Exception { } } } + + public void testTasks() throws Exception { + createRemoteIndex(between(10, 100)); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); + request.pragmas(randomPragmas()); + ActionFuture requestFuture = client().execute(EsqlQueryAction.INSTANCE, request); + assertTrue(PauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS)); + try { + assertBusy(() -> { + List clusterTasks = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setActions(ComputeService.CLUSTER_ACTION_NAME) + .get() + .getTasks(); + assertThat(clusterTasks.size(), equalTo(1)); + List drivers = client(REMOTE_CLUSTER).admin() + .cluster() + .prepareListTasks() + .setTargetParentTaskId(clusterTasks.getFirst().taskId()) + .setActions(DriverTaskRunner.ACTION_NAME) + .setDetailed(true) + .get() + .getTasks(); + assertThat(drivers.size(), equalTo(1)); + TaskInfo driver = drivers.getFirst(); + assertThat(driver.description(), equalTo(""" + \\_ExchangeSourceOperator[] + \\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs] + \\_ExchangeSinkOperator""")); + }); + } finally { + PauseFieldPlugin.allowEmitting.countDown(); + } + requestFuture.actionGet(30, TimeUnit.SECONDS).close(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index c998af2215169..f4ada1442efe5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -29,14 +29,8 @@ import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; -import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Filter; -import org.elasticsearch.xpack.esql.plan.logical.Limit; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.plan.logical.OrderBy; -import 
org.elasticsearch.xpack.esql.plan.logical.TopN; -import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -44,10 +38,7 @@ import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; -import org.elasticsearch.xpack.esql.plan.physical.LimitExec; -import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.planner.mapper.LocalMapper; import org.elasticsearch.xpack.esql.planner.mapper.Mapper; import org.elasticsearch.xpack.esql.session.Configuration; @@ -83,29 +74,25 @@ public static Tuple breakPlanBetweenCoordinatorAndDa return new Tuple<>(coordinatorPlan, dataNodePlan.get()); } - public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan unused) { - var pipelineBreakers = plan.collectFirstChildren(Mapper::isPipelineBreaker); + public static PhysicalPlan reductionPlan(PhysicalPlan plan) { + // find the logical fragment + var fragments = plan.collectFirstChildren(p -> p instanceof FragmentExec); + if (fragments.isEmpty()) { + return null; + } + final FragmentExec fragment = (FragmentExec) fragments.getFirst(); - if (pipelineBreakers.isEmpty() == false) { - UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); - if (pipelineBreaker instanceof TopN) { - LocalMapper mapper = new LocalMapper(); - var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); - return physicalPlan.collectFirstChildren(TopNExec.class::isInstance).get(0); - } else if (pipelineBreaker instanceof Limit limit) { - return new LimitExec(limit.source(), unused, limit.limit()); - } else if (pipelineBreaker instanceof OrderBy order) { - return new OrderExec(order.source(), unused, order.order()); - } else if (pipelineBreaker instanceof Aggregate) { - LocalMapper mapper = new LocalMapper(); - var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); - var aggregate = (AggregateExec) physicalPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0); - return aggregate.withMode(AggregatorMode.INITIAL); - } else { - throw new EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); - } + final var pipelineBreakers = fragment.fragment().collectFirstChildren(Mapper::isPipelineBreaker); + if (pipelineBreakers.isEmpty()) { + return null; + } + final var pipelineBreaker = pipelineBreakers.getFirst(); + final LocalMapper mapper = new LocalMapper(); + PhysicalPlan reducePlan = mapper.map(pipelineBreaker); + if (reducePlan instanceof AggregateExec agg) { + reducePlan = agg.withMode(AggregatorMode.INITIAL); // force to emit intermediate outputs } - return null; + return EstimatesRowSize.estimateRowSize(fragment.estimatedRowSize(), reducePlan); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index b06dd3cdb64d3..9aea1577a4137 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -60,12 +60,10 @@ import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; -import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; @@ -780,35 +778,24 @@ private void runComputeOnDataNode( } } + private static PhysicalPlan reductionPlan(ExchangeSinkExec plan, boolean enable) { + PhysicalPlan reducePlan = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); + if (enable) { + PhysicalPlan p = PlannerUtils.reductionPlan(plan); + if (p != null) { + reducePlan = p.replaceChildren(List.of(reducePlan)); + } + } + return new ExchangeSinkExec(plan.source(), plan.output(), plan.isIntermediateAgg(), reducePlan); + } + private class DataNodeRequestHandler implements TransportRequestHandler { @Override public void messageReceived(DataNodeRequest request, TransportChannel channel, Task task) { final ActionListener listener = new ChannelActionListener<>(channel); - final ExchangeSinkExec reducePlan; + final PhysicalPlan reductionPlan; if (request.plan() instanceof ExchangeSinkExec plan) { - var fragments = plan.collectFirstChildren(FragmentExec.class::isInstance); - if (fragments.isEmpty()) { - listener.onFailure(new IllegalStateException("expected a fragment plan for a remote compute; got " + request.plan())); - return; - } - var localExchangeSource = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); - Holder reducePlanHolder = new Holder<>(); - if (request.pragmas().nodeLevelReduction()) { - PhysicalPlan dataNodePlan = request.plan(); - request.plan() - .forEachUp( - FragmentExec.class, - f -> { reducePlanHolder.set(PlannerUtils.dataNodeReductionPlan(f.fragment(), dataNodePlan)); } - ); - } - reducePlan = new ExchangeSinkExec( - plan.source(), - plan.output(), - plan.isIntermediateAgg(), - reducePlanHolder.get() != null - ? reducePlanHolder.get().replaceChildren(List.of(localExchangeSource)) - : localExchangeSource - ); + reductionPlan = reductionPlan(plan, request.pragmas().nodeLevelReduction()); } else { listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); return; @@ -825,7 +812,7 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T request.indicesOptions() ); try (var computeListener = ComputeListener.create(transportService, (CancellableTask) task, listener)) { - runComputeOnDataNode((CancellableTask) task, sessionId, reducePlan, request, computeListener); + runComputeOnDataNode((CancellableTask) task, sessionId, reductionPlan, request, computeListener); } } } @@ -871,10 +858,10 @@ public void messageReceived(ClusterComputeRequest request, TransportChannel chan * Performs a compute on a remote cluster. The output pages are placed in an exchange sink specified by * {@code globalSessionId}. 
The coordinator on the main cluster will poll pages from there.
     * <p>
    - * Currently, the coordinator on the remote cluster simply collects pages from data nodes in the remote cluster - * and places them in the exchange sink. We can achieve this by using a single exchange buffer to minimize overhead. - * However, here we use two exchange buffers so that we can run an actual plan on this coordinator to perform partial - * reduce operations, such as limit, topN, and partial-to-partial aggregation in the future. + * Currently, the coordinator on the remote cluster polls pages from data nodes within the remote cluster + * and performs cluster-level reduction before sending pages to the querying cluster. This reduction aims + * to minimize data transfers across clusters but may require additional CPU resources for operations like + * aggregations. */ void runComputeOnRemoteCluster( String clusterAlias, @@ -892,6 +879,7 @@ void runComputeOnRemoteCluster( () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) ); final String localSessionId = clusterAlias + ":" + globalSessionId; + final PhysicalPlan coordinatorPlan = reductionPlan(plan, true); var exchangeSource = new ExchangeSourceHandler( configuration.pragmas().exchangeBufferSize(), transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), @@ -899,12 +887,6 @@ void runComputeOnRemoteCluster( ); try (Releasable ignored = exchangeSource.addEmptySink()) { exchangeSink.addCompletionListener(computeListener.acquireAvoid()); - PhysicalPlan coordinatorPlan = new ExchangeSinkExec( - plan.source(), - plan.output(), - plan.isIntermediateAgg(), - new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()) - ); runCompute( parentTask, new ComputeContext(localSessionId, clusterAlias, List.of(), configuration, exchangeSource, exchangeSink), From 4a9f632fab7571e198f5030dd30acc80c436c58b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 2 Dec 2024 19:53:08 -0800 Subject: [PATCH 358/386] By pass cancellation when closing sinks (#117797) > **java.lang.AssertionError: Leftover exchanges ExchangeService{sinks=[veZSyrPATq2Sg83dtgK3Jg:700/3]} on node node_s4** I looked into the test failure described in https://github.com/elastic/elasticsearch/issues/117253. The reason we don't clean up the exchange sink quickly is that, once a failure occurs, we cancel the request along with all its child requests. These exchange sinks will be cleaned up only after they become inactive, which by default takes 5 minutes. We could override the `esql.exchange.sink_inactive_interval` setting in the test to remove these exchange sinks faster. However, I think we should allow exchange requests that close exchange sinks to bypass cancellation, enabling quicker resource cleanup than the default inactive interval. 
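The crux of the change is that a request whose only job is to close a remote sink masks its parent task, so cancelling the query's task tree no longer rejects it. A toy, self-contained model of that idea (the names are illustrative, not the real task-management API):

```java
import java.util.Set;

class CancellationBypassSketch {
    record Request(long parentTaskId, boolean sourcesFinished) {
        // Mirrors the idea in ExchangeRequest#getParentTask: a request that only closes
        // the sink (sourcesFinished == true) reports no parent, so cancelling the
        // query's task tree no longer rejects it.
        long effectiveParentTask() {
            return sourcesFinished ? -1L : parentTaskId;
        }
    }

    public static void main(String[] args) {
        Set<Long> cancelledTasks = Set.of(42L);
        Request fetchPage = new Request(42L, false);
        Request closeSink = new Request(42L, true);
        System.out.println(cancelledTasks.contains(fetchPage.effectiveParentTask())); // true  -> rejected
        System.out.println(cancelledTasks.contains(closeSink.effectiveParentTask())); // false -> runs, sink freed now
    }
}
```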
Closes #117253 --- .../operator/exchange/ExchangeRequest.java | 17 ++++++-- .../operator/exchange/ExchangeService.java | 43 ++++++++++--------- .../exchange/ExchangeSourceHandler.java | 8 ++-- .../compute/operator/exchange/RemoteSink.java | 10 +++++ .../exchange/ExchangeRequestTests.java | 27 ++++++++++++ .../exchange/ExchangeServiceTests.java | 6 ++- 6 files changed, 82 insertions(+), 29 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeRequestTests.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeRequest.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeRequest.java index 6ed2cc7e587be..1e8700bcd4030 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeRequest.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeRequest.java @@ -40,6 +40,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(sourcesFinished); } + @Override + public TaskId getParentTask() { + // Exchange requests with `sourcesFinished=true` complete the remote sink and return without blocking. + // Masking the parent task allows these requests to bypass task cancellation, ensuring cleanup of the remote sink. + // TODO: Maybe add a separate action/request for closing exchange sinks? + if (sourcesFinished) { + return TaskId.EMPTY_TASK_ID; + } + return super.getParentTask(); + } + /** * True if the {@link ExchangeSourceHandler} has enough input. * The corresponding {@link ExchangeSinkHandler} can drain pages and finish itself. @@ -70,9 +81,9 @@ public int hashCode() { @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - if (parentTaskId.isSet() == false) { - assert false : "ExchangeRequest must have a parent task"; - throw new IllegalStateException("ExchangeRequest must have a parent task"); + if (sourcesFinished == false && parentTaskId.isSet() == false) { + assert false : "ExchangeRequest with sourcesFinished=false must have a parent task"; + throw new IllegalStateException("ExchangeRequest with sourcesFinished=false must have a parent task"); } return new CancellableTask(id, type, action, "", parentTaskId, headers) { @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index a943a90d02e87..00c68c4f48e86 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -314,28 +314,20 @@ static final class TransportRemoteSink implements RemoteSink { @Override public void fetchPageAsync(boolean allSourcesFinished, ActionListener listener) { if (allSourcesFinished) { - if (finished.compareAndSet(false, true)) { - doFetchPageAsync(true, listener); - } else { - // already finished or promised - listener.onResponse(new ExchangeResponse(blockFactory, null, true)); - } - } else { - // already finished - if (finished.get()) { - listener.onResponse(new ExchangeResponse(blockFactory, null, true)); - return; - } - doFetchPageAsync(false, ActionListener.wrap(r -> { - if (r.finished()) { - finished.set(true); - } - 
listener.onResponse(r); - }, e -> { - finished.set(true); - listener.onFailure(e); - })); + close(listener.map(unused -> new ExchangeResponse(blockFactory, null, true))); + return; + } + // already finished + if (finished.get()) { + listener.onResponse(new ExchangeResponse(blockFactory, null, true)); + return; } + doFetchPageAsync(false, ActionListener.wrap(r -> { + if (r.finished()) { + finished.set(true); + } + listener.onResponse(r); + }, e -> close(ActionListener.running(() -> listener.onFailure(e))))); } private void doFetchPageAsync(boolean allSourcesFinished, ActionListener listener) { @@ -361,6 +353,15 @@ private void doFetchPageAsync(boolean allSourcesFinished, ActionListener listener) { + if (finished.compareAndSet(false, true)) { + doFetchPageAsync(true, listener.delegateFailure((l, unused) -> l.onResponse(null))); + } else { + listener.onResponse(null); + } + } } // For testing diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index 61b3386ce0274..375016a5d51d5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -224,8 +224,10 @@ void onSinkFailed(Exception e) { buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading if (finished == false) { finished = true; - outstandingSinks.finishInstance(); - completionListener.onFailure(e); + remoteSink.close(ActionListener.running(() -> { + outstandingSinks.finishInstance(); + completionListener.onFailure(e); + })); } } @@ -262,7 +264,7 @@ public void onFailure(Exception e) { failure.unwrapAndCollect(e); } buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading - sinkListener.onFailure(e); + remoteSink.close(ActionListener.running(() -> sinkListener.onFailure(e))); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/RemoteSink.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/RemoteSink.java index 7d81cd3f66600..aaa937ef17c0e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/RemoteSink.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/RemoteSink.java @@ -12,4 +12,14 @@ public interface RemoteSink { void fetchPageAsync(boolean allSourcesFinished, ActionListener listener); + + default void close(ActionListener listener) { + fetchPageAsync(true, listener.delegateFailure((l, r) -> { + try { + r.close(); + } finally { + l.onResponse(null); + } + })); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeRequestTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeRequestTests.java new file mode 100644 index 0000000000000..8a0891651a497 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeRequestTests.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator.exchange; + +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ExchangeRequestTests extends ESTestCase { + + public void testParentTask() { + ExchangeRequest r1 = new ExchangeRequest("1", true); + r1.setParentTask(new TaskId("node-1", 1)); + assertSame(TaskId.EMPTY_TASK_ID, r1.getParentTask()); + + ExchangeRequest r2 = new ExchangeRequest("1", false); + r2.setParentTask(new TaskId("node-2", 2)); + assertTrue(r2.getParentTask().isSet()); + assertThat(r2.getParentTask(), equalTo((new TaskId("node-2", 2)))); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 4178f02898d79..fc6c850ba187b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -491,7 +491,7 @@ public void testConcurrentWithTransportActions() { } } - public void testFailToRespondPage() { + public void testFailToRespondPage() throws Exception { Settings settings = Settings.builder().build(); MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(settings, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); @@ -558,7 +558,9 @@ public void sendResponse(TransportResponse transportResponse) { Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); - sinkHandler.onFailure(new RuntimeException(cause)); + PlainActionFuture sinkCompletionFuture = new PlainActionFuture<>(); + sinkHandler.addCompletionListener(sinkCompletionFuture); + assertBusy(() -> assertTrue(sinkCompletionFuture.isDone())); expectThrows(Exception.class, () -> sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS)); } } From af9a57ec66770530cf45aefd842e86a810b13947 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 3 Dec 2024 07:18:44 +0100 Subject: [PATCH 359/386] Remove supersetSize and subsetSize from InternalSignificantTerms.Bucket (#117574) Those fields are only used to update the score and not serialized in the bucket so they can be removed. 
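A small, self-contained sketch of the resulting shape (simplified stand-in classes, not the real ones): the subset/superset sizes live on the enclosing aggregation and flow into `updateScore` as parameters, so each bucket carries only its own document frequencies.

```java
class SignificanceSketch {
    interface SignificanceHeuristic {
        double score(long subsetDf, long subsetSize, long supersetDf, long supersetSize);
    }

    static class Bucket {
        long subsetDf;    // per-bucket: doc count in the foreground set
        long supersetDf;  // per-bucket: doc count in the background set
        double score;

        // subsetSize/supersetSize are aggregation-wide and identical for every bucket,
        // so they are passed in at scoring time instead of being stored per bucket.
        void updateScore(SignificanceHeuristic heuristic, long subsetSize, long supersetSize) {
            score = heuristic.score(subsetDf, subsetSize, supersetDf, supersetSize);
        }
    }

    public static void main(String[] args) {
        SignificanceHeuristic ratio =
            (df, n, sdf, sn) -> ((double) df / n) / ((double) sdf / sn);
        Bucket bucket = new Bucket();
        bucket.subsetDf = 5;
        bucket.supersetDf = 10;
        bucket.updateScore(ratio, 50, 1000); // sizes supplied by the enclosing aggregation
        System.out.println(bucket.score);    // 10.0
    }
}
```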
--- .../SignificantTermsSignificanceScoreIT.java | 2 +- .../GlobalOrdinalsStringTermsAggregator.java | 9 +-- .../terms/InternalMappedSignificantTerms.java | 6 +- .../terms/InternalSignificantTerms.java | 50 ++------------ .../terms/MapStringTermsAggregator.java | 64 +++++++++-------- .../bucket/terms/NumericTermsAggregator.java | 69 ++++++++++--------- .../bucket/terms/SignificantLongTerms.java | 39 ++--------- .../bucket/terms/SignificantStringTerms.java | 30 ++------ .../bucket/terms/SignificantTerms.java | 24 +++---- .../terms/UnmappedSignificantTerms.java | 25 ++----- .../InternalSignificantTermsTestCase.java | 2 - .../terms/SignificantLongTermsTests.java | 15 +--- .../terms/SignificantStringTermsTests.java | 15 +--- ...AbstractSignificanceHeuristicTestCase.java | 39 +++-------- 14 files changed, 127 insertions(+), 262 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index bf11c1d69bcc6..671f60e2b9d5e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -495,7 +495,7 @@ public void testScriptScore() throws ExecutionException, InterruptedException, I for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) { assertThat( bucket.getSignificanceScore(), - is((double) bucket.getSubsetDf() + bucket.getSubsetSize() + bucket.getSupersetDf() + bucket.getSupersetSize()) + is((double) bucket.getSubsetDf() + sigTerms.getSubsetSize() + bucket.getSupersetDf() + sigTerms.getSupersetSize()) ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 5a79155d1d4f5..4cf710232c7a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -989,7 +989,7 @@ SignificantStringTerms.Bucket[] buildBuckets(int size) { @Override SignificantStringTerms.Bucket buildEmptyTemporaryBucket() { - return new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, 0, 0, null, format, 0); + return new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, null, format, 0); } private long subsetSize(long owningBucketOrd) { @@ -998,22 +998,19 @@ private long subsetSize(long owningBucketOrd) { } @Override - BucketUpdater bucketUpdater(long owningBucketOrd, GlobalOrdLookupFunction lookupGlobalOrd) - throws IOException { + BucketUpdater bucketUpdater(long owningBucketOrd, GlobalOrdLookupFunction lookupGlobalOrd) { long subsetSize = subsetSize(owningBucketOrd); return (spare, globalOrd, bucketOrd, docCount) -> { spare.bucketOrd = bucketOrd; oversizedCopy(lookupGlobalOrd.apply(globalOrd), spare.termBytes); spare.subsetDf = docCount; - spare.subsetSize = subsetSize; spare.supersetDf = backgroundFrequencies.freq(spare.termBytes); - spare.supersetSize = supersetSize; /* * During shard-local down-selection we use subset/superset stats * that are for this shard only. 
Back at the central reducer these * properties will be updated with global stats. */ - spare.updateScore(significanceHeuristic); + spare.updateScore(significanceHeuristic, subsetSize, supersetSize); }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java index 3f75a27306ab4..8c6d21cc74119 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java @@ -59,7 +59,7 @@ protected InternalMappedSignificantTerms(StreamInput in, Bucket.Reader bucket subsetSize = in.readVLong(); supersetSize = in.readVLong(); significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class); - buckets = in.readCollectionAsList(stream -> bucketReader.read(stream, subsetSize, supersetSize, format)); + buckets = in.readCollectionAsList(stream -> bucketReader.read(stream, format)); } @Override @@ -91,12 +91,12 @@ public B getBucketByKey(String term) { } @Override - protected long getSubsetSize() { + public long getSubsetSize() { return subsetSize; } @Override - protected long getSupersetSize() { + public long getSupersetSize() { return supersetSize; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 6c0eb465d1f80..78ae2481f5d99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -53,13 +53,11 @@ public abstract static class Bucket> extends InternalMultiBu */ @FunctionalInterface public interface Reader> { - B read(StreamInput in, long subsetSize, long supersetSize, DocValueFormat format) throws IOException; + B read(StreamInput in, DocValueFormat format) throws IOException; } long subsetDf; - long subsetSize; long supersetDf; - long supersetSize; /** * Ordinal of the bucket while it is being built. Not used after it is * returned from {@link Aggregator#buildAggregations(org.elasticsearch.common.util.LongArray)} and not @@ -70,16 +68,7 @@ public interface Reader> { protected InternalAggregations aggregations; final transient DocValueFormat format; - protected Bucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - DocValueFormat format - ) { - this.subsetSize = subsetSize; - this.supersetSize = supersetSize; + protected Bucket(long subsetDf, long supersetDf, InternalAggregations aggregations, DocValueFormat format) { this.subsetDf = subsetDf; this.supersetDf = supersetDf; this.aggregations = aggregations; @@ -89,9 +78,7 @@ protected Bucket( /** * Read from a stream. 
*/ - protected Bucket(StreamInput in, long subsetSize, long supersetSize, DocValueFormat format) { - this.subsetSize = subsetSize; - this.supersetSize = supersetSize; + protected Bucket(StreamInput in, DocValueFormat format) { this.format = format; } @@ -105,20 +92,10 @@ public long getSupersetDf() { return supersetDf; } - @Override - public long getSupersetSize() { - return supersetSize; - } - - @Override - public long getSubsetSize() { - return subsetSize; - } - // TODO we should refactor to remove this, since buckets should be immutable after they are generated. // This can lead to confusing bugs if the bucket is re-created (via createBucket() or similar) without // the score - void updateScore(SignificanceHeuristic significanceHeuristic) { + void updateScore(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize) { score = significanceHeuristic.getScore(subsetDf, subsetSize, supersetDf, supersetSize); } @@ -262,13 +239,11 @@ public InternalAggregation get() { buckets.forEach(entry -> { final B b = createBucket( entry.value.subsetDf[0], - globalSubsetSize, entry.value.supersetDf[0], - globalSupersetSize, entry.value.reducer.getAggregations(), entry.value.reducer.getProto() ); - b.updateScore(heuristic); + b.updateScore(heuristic, globalSubsetSize, globalSupersetSize); if (((b.score > 0) && (b.subsetDf >= minDocCount)) || reduceContext.isFinalReduce() == false) { final B removed = ordered.insertWithOverflow(b); if (removed == null) { @@ -317,9 +292,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { .map( b -> createBucket( samplingContext.scaleUp(b.subsetDf), - subsetSize, samplingContext.scaleUp(b.supersetDf), - supersetSize, InternalAggregations.finalizeSampling(b.aggregations, samplingContext), b ) @@ -328,14 +301,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } - abstract B createBucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - B prototype - ); + abstract B createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, B prototype); protected abstract A create(long subsetSize, long supersetSize, List buckets); @@ -344,10 +310,6 @@ abstract B createBucket( */ protected abstract B[] createBucketsArray(int size); - protected abstract long getSubsetSize(); - - protected abstract long getSupersetSize(); - protected abstract SignificanceHeuristic getSignificanceHeuristic(); @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index 6ae47d5975479..b96c495d37489 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -47,7 +47,6 @@ import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.LongConsumer; -import java.util.function.Supplier; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; @@ -296,7 +295,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { B spare = null; BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd); - Supplier emptyBucketBuilder = 
emptyBucketBuilder(owningOrd); + BucketUpdater bucketUpdater = bucketUpdater(owningOrd); while (ordsEnum.next()) { long docCount = bucketDocCount(ordsEnum.ord()); otherDocCounts.increment(ordIdx, docCount); @@ -305,9 +304,9 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro } if (spare == null) { checkRealMemoryCBForInternalBucket(); - spare = emptyBucketBuilder.get(); + spare = buildEmptyBucket(); } - updateBucket(spare, ordsEnum, docCount); + bucketUpdater.updateBucket(spare, ordsEnum, docCount); spare = ordered.insertWithOverflow(spare); } @@ -348,9 +347,9 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro abstract void collectZeroDocEntriesIfNeeded(long owningBucketOrd, boolean excludeDeletedDocs) throws IOException; /** - * Build an empty temporary bucket. + * Build an empty bucket. */ - abstract Supplier emptyBucketBuilder(long owningBucketOrd); + abstract B buildEmptyBucket(); /** * Build a {@link PriorityQueue} to sort the buckets. After we've @@ -362,7 +361,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro * Update fields in {@code spare} to reflect information collected for * this bucket ordinal. */ - abstract void updateBucket(B spare, BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum, long docCount) throws IOException; + abstract BucketUpdater bucketUpdater(long owningBucketOrd); /** * Build an array to hold the "top" buckets for each ordinal. @@ -399,6 +398,10 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro abstract R buildEmptyResult(); } + interface BucketUpdater { + void updateBucket(B spare, BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum, long docCount) throws IOException; + } + /** * Builds results for the standard {@code terms} aggregation. 
*/ @@ -490,8 +493,8 @@ private void collectZeroDocEntries(BinaryDocValues values, Bits liveDocs, int ma } @Override - Supplier emptyBucketBuilder(long owningBucketOrd) { - return () -> new StringTerms.Bucket(new BytesRef(), 0, null, showTermDocCountError, 0, format); + StringTerms.Bucket buildEmptyBucket() { + return new StringTerms.Bucket(new BytesRef(), 0, null, showTermDocCountError, 0, format); } @Override @@ -500,10 +503,12 @@ ObjectArrayPriorityQueue buildPriorityQueue(int size) { } @Override - void updateBucket(StringTerms.Bucket spare, BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum, long docCount) throws IOException { - ordsEnum.readValue(spare.termBytes); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); + BucketUpdater bucketUpdater(long owningBucketOrd) { + return (spare, ordsEnum, docCount) -> { + ordsEnum.readValue(spare.termBytes); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + }; } @Override @@ -615,9 +620,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { void collectZeroDocEntriesIfNeeded(long owningBucketOrd, boolean excludeDeletedDocs) throws IOException {} @Override - Supplier emptyBucketBuilder(long owningBucketOrd) { - long subsetSize = subsetSizes.get(owningBucketOrd); - return () -> new SignificantStringTerms.Bucket(new BytesRef(), 0, subsetSize, 0, 0, null, format, 0); + SignificantStringTerms.Bucket buildEmptyBucket() { + return new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, null, format, 0); } @Override @@ -626,20 +630,20 @@ ObjectArrayPriorityQueue buildPriorityQueue(int s } @Override - void updateBucket(SignificantStringTerms.Bucket spare, BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum, long docCount) - throws IOException { - - ordsEnum.readValue(spare.termBytes); - spare.bucketOrd = ordsEnum.ord(); - spare.subsetDf = docCount; - spare.supersetDf = backgroundFrequencies.freq(spare.termBytes); - spare.supersetSize = supersetSize; - /* - * During shard-local down-selection we use subset/superset stats - * that are for this shard only. Back at the central reducer these - * properties will be updated with global stats. - */ - spare.updateScore(significanceHeuristic); + BucketUpdater bucketUpdater(long owningBucketOrd) { + long subsetSize = subsetSizes.get(owningBucketOrd); + return (spare, ordsEnum, docCount) -> { + ordsEnum.readValue(spare.termBytes); + spare.bucketOrd = ordsEnum.ord(); + spare.subsetDf = docCount; + spare.supersetDf = backgroundFrequencies.freq(spare.termBytes); + /* + * During shard-local down-selection we use subset/superset stats + * that are for this shard only. Back at the central reducer these + * properties will be updated with global stats. 
+ */ + spare.updateScore(significanceHeuristic, subsetSize, supersetSize); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index ce89b95b76a05..5d4c15d8a3b80 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -43,7 +43,6 @@ import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Supplier; import static java.util.Collections.emptyList; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; @@ -177,7 +176,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { B spare = null; BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - Supplier emptyBucketBuilder = emptyBucketBuilder(owningBucketOrd); + BucketUpdater bucketUpdater = bucketUpdater(owningBucketOrd); while (ordsEnum.next()) { long docCount = bucketDocCount(ordsEnum.ord()); otherDocCounts.increment(ordIdx, docCount); @@ -186,9 +185,9 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro } if (spare == null) { checkRealMemoryCBForInternalBucket(); - spare = emptyBucketBuilder.get(); + spare = buildEmptyBucket(); } - updateBucket(spare, ordsEnum, docCount); + bucketUpdater.updateBucket(spare, ordsEnum, docCount); spare = ordered.insertWithOverflow(spare); } @@ -240,17 +239,16 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro abstract B[] buildBuckets(int size); /** - * Build a {@linkplain Supplier} that can be used to build "empty" - * buckets. Those buckets will then be {@link #updateBucket updated} + * Build an empty bucket. Those buckets will then be {@link #bucketUpdater(long)} updated} * for each collected bucket. */ - abstract Supplier emptyBucketBuilder(long owningBucketOrd); + abstract B buildEmptyBucket(); /** * Update fields in {@code spare} to reflect information collected for * this bucket ordinal. */ - abstract void updateBucket(B spare, BucketOrdsEnum ordsEnum, long docCount) throws IOException; + abstract BucketUpdater bucketUpdater(long owningBucketOrd); /** * Build a {@link ObjectArrayPriorityQueue} to sort the buckets. 
After we've @@ -282,6 +280,10 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro abstract R buildEmptyResult(); } + interface BucketUpdater { + void updateBucket(B spare, BucketOrdsEnum ordsEnum, long docCount) throws IOException; + } + abstract class StandardTermsResultStrategy, B extends InternalTerms.Bucket> extends ResultStrategy { protected final boolean showTermDocCountError; @@ -305,13 +307,6 @@ final void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } - @Override - Supplier emptyBucketBuilder(long owningBucketOrd) { - return this::buildEmptyBucket; - } - - abstract B buildEmptyBucket(); - @Override final void collectZeroDocEntriesIfNeeded(long owningBucketOrd, boolean excludeDeletedDocs) throws IOException { if (bucketCountThresholds.getMinDocCount() != 0) { @@ -375,10 +370,12 @@ LongTerms.Bucket buildEmptyBucket() { } @Override - void updateBucket(LongTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) { - spare.term = ordsEnum.value(); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); + BucketUpdater bucketUpdater(long owningBucketOrd) { + return (LongTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) -> { + spare.term = ordsEnum.value(); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + }; } @Override @@ -457,10 +454,12 @@ DoubleTerms.Bucket buildEmptyBucket() { } @Override - void updateBucket(DoubleTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) { - spare.term = NumericUtils.sortableLongToDouble(ordsEnum.value()); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); + BucketUpdater bucketUpdater(long owningBucketOrd) { + return (DoubleTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) -> { + spare.term = NumericUtils.sortableLongToDouble(ordsEnum.value()); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + }; } @Override @@ -565,20 +564,22 @@ SignificantLongTerms.Bucket[] buildBuckets(int size) { } @Override - Supplier emptyBucketBuilder(long owningBucketOrd) { - long subsetSize = subsetSizes.get(owningBucketOrd); - return () -> new SignificantLongTerms.Bucket(0, subsetSize, 0, supersetSize, 0, null, format, 0); + SignificantLongTerms.Bucket buildEmptyBucket() { + return new SignificantLongTerms.Bucket(0, 0, 0, null, format, 0); } @Override - void updateBucket(SignificantLongTerms.Bucket spare, BucketOrdsEnum ordsEnum, long docCount) throws IOException { - spare.term = ordsEnum.value(); - spare.subsetDf = docCount; - spare.supersetDf = backgroundFrequencies.freq(spare.term); - spare.bucketOrd = ordsEnum.ord(); - // During shard-local down-selection we use subset/superset stats that are for this shard only - // Back at the central reducer these properties will be updated with global stats - spare.updateScore(significanceHeuristic); + BucketUpdater bucketUpdater(long owningBucketOrd) { + long subsetSize = subsetSizes.get(owningBucketOrd); + return (spare, ordsEnum, docCount) -> { + spare.term = ordsEnum.value(); + spare.subsetDf = docCount; + spare.supersetDf = backgroundFrequencies.freq(spare.term); + spare.bucketOrd = ordsEnum.ord(); + // During shard-local down-selection we use subset/superset stats that are for this shard only + // Back at the central reducer these properties will be updated with global stats + spare.updateScore(significanceHeuristic, subsetSize, supersetSize); + }; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java index 2aace2a714a26..17ea290b7aaaf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java @@ -30,23 +30,14 @@ public static class Bucket extends InternalSignificantTerms.Bucket { long term; - public Bucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - long term, - InternalAggregations aggregations, - DocValueFormat format, - double score - ) { - super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); + public Bucket(long subsetDf, long supersetDf, long term, InternalAggregations aggregations, DocValueFormat format, double score) { + super(subsetDf, supersetDf, aggregations, format); this.term = term; this.score = score; } - Bucket(StreamInput in, long subsetSize, long supersetSize, DocValueFormat format) throws IOException { - super(in, subsetSize, supersetSize, format); + Bucket(StreamInput in, DocValueFormat format) throws IOException { + super(in, format); subsetDf = in.readVLong(); supersetDf = in.readVLong(); term = in.readLong(); @@ -136,16 +127,7 @@ public SignificantLongTerms create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) { - return new Bucket( - prototype.subsetDf, - prototype.subsetSize, - prototype.supersetDf, - prototype.supersetSize, - prototype.term, - aggregations, - prototype.format, - prototype.score - ); + return new Bucket(prototype.subsetDf, prototype.supersetDf, prototype.term, aggregations, prototype.format, prototype.score); } @Override @@ -169,14 +151,7 @@ protected Bucket[] createBucketsArray(int size) { } @Override - Bucket createBucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - SignificantLongTerms.Bucket prototype - ) { - return new Bucket(subsetDf, subsetSize, supersetDf, supersetSize, prototype.term, aggregations, format, prototype.score); + Bucket createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) { + return new Bucket(subsetDf, supersetDf, prototype.term, aggregations, format, prototype.score); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java index 791c09d3cbd99..b255f17d2843b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTerms.java @@ -34,14 +34,12 @@ public static class Bucket extends InternalSignificantTerms.Bucket { public Bucket( BytesRef term, long subsetDf, - long subsetSize, long supersetDf, - long supersetSize, InternalAggregations aggregations, DocValueFormat format, double score ) { - super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); + super(subsetDf, supersetDf, aggregations, format); this.termBytes = term; this.score = score; } @@ -49,8 +47,8 @@ public Bucket( /** * Read from a stream. 
*/ - public Bucket(StreamInput in, long subsetSize, long supersetSize, DocValueFormat format) throws IOException { - super(in, subsetSize, supersetSize, format); + public Bucket(StreamInput in, DocValueFormat format) throws IOException { + super(in, format); termBytes = in.readBytesRef(); subsetDf = in.readVLong(); supersetDf = in.readVLong(); @@ -140,16 +138,7 @@ public SignificantStringTerms create(List buckets @Override public Bucket createBucket(InternalAggregations aggregations, SignificantStringTerms.Bucket prototype) { - return new Bucket( - prototype.termBytes, - prototype.subsetDf, - prototype.subsetSize, - prototype.supersetDf, - prototype.supersetSize, - aggregations, - prototype.format, - prototype.score - ); + return new Bucket(prototype.termBytes, prototype.subsetDf, prototype.supersetDf, aggregations, prototype.format, prototype.score); } @Override @@ -173,14 +162,7 @@ protected Bucket[] createBucketsArray(int size) { } @Override - Bucket createBucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - SignificantStringTerms.Bucket prototype - ) { - return new Bucket(prototype.termBytes, subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format, prototype.score); + Bucket createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, SignificantStringTerms.Bucket prototype) { + return new Bucket(prototype.termBytes, subsetDf, supersetDf, aggregations, format, prototype.score); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTerms.java index f02b5338eea74..e8f160193bc71 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTerms.java @@ -17,6 +17,18 @@ */ public interface SignificantTerms extends MultiBucketsAggregation, Iterable { + /** + * @return The numbers of docs in the subset (also known as "foreground set"). + * This number is equal to the document count of the containing aggregation. + */ + long getSubsetSize(); + + /** + * @return The numbers of docs in the superset (ordinarily the background count + * of the containing aggregation). + */ + long getSupersetSize(); + interface Bucket extends MultiBucketsAggregation.Bucket { /** @@ -30,24 +42,12 @@ interface Bucket extends MultiBucketsAggregation.Bucket { */ long getSubsetDf(); - /** - * @return The numbers of docs in the subset (also known as "foreground set"). - * This number is equal to the document count of the containing aggregation. - */ - long getSubsetSize(); - /** * @return The number of docs in the superset containing a particular term (also * known as the "background count" of the bucket) */ long getSupersetDf(); - /** - * @return The numbers of docs in the superset (ordinarily the background count - * of the containing aggregation). 
- */ - long getSupersetSize(); - } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java index 8bd14a46bff96..6d1370f147f36 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java @@ -40,16 +40,8 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms { - private Bucket( - BytesRef term, - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - DocValueFormat format - ) { - super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); + private Bucket(BytesRef term, long subsetDf, long supersetDf, InternalAggregations aggregations, DocValueFormat format) { + super(subsetDf, supersetDf, aggregations, format); } } @@ -95,14 +87,7 @@ protected UnmappedSignificantTerms create(long subsetSize, long supersetSize, Li } @Override - Bucket createBucket( - long subsetDf, - long subsetSize, - long supersetDf, - long supersetSize, - InternalAggregations aggregations, - Bucket prototype - ) { + Bucket createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, Bucket prototype) { throw new UnsupportedOperationException("not supported for UnmappedSignificantTerms"); } @@ -153,12 +138,12 @@ protected SignificanceHeuristic getSignificanceHeuristic() { } @Override - protected long getSubsetSize() { + public long getSubsetSize() { return 0; } @Override - protected long getSupersetSize() { + public long getSupersetSize() { return 0; } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java index 6d49d6855caca..7e5d19977fe9f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTermsTestCase.java @@ -59,8 +59,6 @@ protected void assertSampled( InternalSignificantTerms.Bucket sampledBucket = sampledIt.next(); assertEquals(sampledBucket.subsetDf, samplingContext.scaleUp(reducedBucket.subsetDf)); assertEquals(sampledBucket.supersetDf, samplingContext.scaleUp(reducedBucket.supersetDf)); - assertEquals(sampledBucket.subsetSize, samplingContext.scaleUp(reducedBucket.subsetSize)); - assertEquals(sampledBucket.supersetSize, samplingContext.scaleUp(reducedBucket.supersetSize)); assertThat(sampledBucket.score, closeTo(reducedBucket.score, 1e-14)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java index a303199338783..92bfa2f6f89f4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTermsTests.java @@ -49,17 +49,8 @@ public void setUp() throws Exception { Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong); - 
SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket( - subsetDfs[i], - subsetSize, - supersetDfs[i], - supersetSize, - term, - aggs, - format, - 0 - ); - bucket.updateScore(significanceHeuristic); + SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket(subsetDfs[i], supersetDfs[i], term, aggs, format, 0); + bucket.updateScore(significanceHeuristic, subsetSize, supersetSize); buckets.add(bucket); } return new SignificantLongTerms(name, requiredSize, 1L, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets); @@ -90,8 +81,6 @@ public void setUp() throws Exception { randomLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), InternalAggregations.EMPTY, format, 0 diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java index a91566c615eaf..7499831f371aa 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantStringTermsTests.java @@ -42,17 +42,8 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); - SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket( - term, - subsetDfs[i], - subsetSize, - supersetDfs[i], - supersetSize, - aggs, - format, - 0 - ); - bucket.updateScore(significanceHeuristic); + SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket(term, subsetDfs[i], supersetDfs[i], aggs, format, 0); + bucket.updateScore(significanceHeuristic, subsetSize, supersetSize); buckets.add(bucket); } return new SignificantStringTerms( @@ -93,8 +84,6 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas new BytesRef(randomAlphaOfLengthBetween(1, 10)), randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong(), InternalAggregations.EMPTY, format, 0 diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java index ae5083c245538..a3c03526c9b93 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java @@ -95,22 +95,20 @@ public void testStreamResponse() throws Exception { InternalMappedSignificantTerms read = (InternalMappedSignificantTerms) in.readNamedWriteable(InternalAggregation.class); assertEquals(sigTerms.getSignificanceHeuristic(), read.getSignificanceHeuristic()); + assertThat(read.getSubsetSize(), equalTo(10L)); + assertThat(read.getSupersetSize(), equalTo(20L)); SignificantTerms.Bucket originalBucket = sigTerms.getBuckets().get(0); SignificantTerms.Bucket streamedBucket = read.getBuckets().get(0); assertThat(originalBucket.getKeyAsString(), equalTo(streamedBucket.getKeyAsString())); assertThat(originalBucket.getSupersetDf(), equalTo(streamedBucket.getSupersetDf())); 
assertThat(originalBucket.getSubsetDf(), equalTo(streamedBucket.getSubsetDf())); - assertThat(streamedBucket.getSubsetSize(), equalTo(10L)); - assertThat(streamedBucket.getSupersetSize(), equalTo(20L)); } InternalMappedSignificantTerms getRandomSignificantTerms(SignificanceHeuristic heuristic) { if (randomBoolean()) { SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket( 1, - 2, 3, - 4, 123, InternalAggregations.EMPTY, DocValueFormat.RAW, @@ -121,9 +119,7 @@ public void testStreamResponse() throws Exception { SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket( new BytesRef("someterm"), 1, - 2, 3, - 4, InternalAggregations.EMPTY, DocValueFormat.RAW, randomDoubleBetween(0, 100, true) @@ -136,15 +132,13 @@ public void testReduce() { List aggs = createInternalAggregations(); AggregationReduceContext context = InternalAggregationTestCase.emptyReduceContextBuilder().forFinalReduction(); SignificantTerms reducedAgg = (SignificantTerms) InternalAggregationTestCase.reduce(aggs, context); + assertThat(reducedAgg.getSubsetSize(), equalTo(16L)); + assertThat(reducedAgg.getSupersetSize(), equalTo(30L)); assertThat(reducedAgg.getBuckets().size(), equalTo(2)); assertThat(reducedAgg.getBuckets().get(0).getSubsetDf(), equalTo(8L)); - assertThat(reducedAgg.getBuckets().get(0).getSubsetSize(), equalTo(16L)); assertThat(reducedAgg.getBuckets().get(0).getSupersetDf(), equalTo(10L)); - assertThat(reducedAgg.getBuckets().get(0).getSupersetSize(), equalTo(30L)); assertThat(reducedAgg.getBuckets().get(1).getSubsetDf(), equalTo(8L)); - assertThat(reducedAgg.getBuckets().get(1).getSubsetSize(), equalTo(16L)); assertThat(reducedAgg.getBuckets().get(1).getSupersetDf(), equalTo(10L)); - assertThat(reducedAgg.getBuckets().get(1).getSupersetSize(), equalTo(30L)); } public void testBasicScoreProperties() { @@ -234,9 +228,9 @@ private List createInternalAggregations() { : new AbstractSignificanceHeuristicTestCase.LongTestAggFactory(); List aggs = new ArrayList<>(); - aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 4, 5, 10, 0))); - aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 4, 5, 10, 1))); - aggs.add(factory.createAggregation(significanceHeuristic, 8, 10, 2, (f, i) -> f.createBucket(4, 4, 5, 10, i))); + aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 5, 0))); + aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 5, 1))); + aggs.add(factory.createAggregation(significanceHeuristic, 8, 10, 2, (f, i) -> f.createBucket(4, 5, i))); return aggs; } @@ -254,7 +248,7 @@ final A createAggregation( abstract A createAggregation(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize, List buckets); - abstract B createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label); + abstract B createBucket(long subsetDF, long supersetDF, long label); } private class StringTestAggFactory extends TestAggFactory { @@ -279,13 +273,11 @@ SignificantStringTerms createAggregation( } @Override - SignificantStringTerms.Bucket createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { + SignificantStringTerms.Bucket createBucket(long subsetDF, long supersetDF, long label) { return new SignificantStringTerms.Bucket( new BytesRef(Long.toString(label).getBytes(StandardCharsets.UTF_8)), subsetDF, - subsetSize, supersetDF, - supersetSize, 
InternalAggregations.EMPTY, DocValueFormat.RAW, 0 @@ -315,17 +307,8 @@ SignificantLongTerms createAggregation( } @Override - SignificantLongTerms.Bucket createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { - return new SignificantLongTerms.Bucket( - subsetDF, - subsetSize, - supersetDF, - supersetSize, - label, - InternalAggregations.EMPTY, - DocValueFormat.RAW, - 0 - ); + SignificantLongTerms.Bucket createBucket(long subsetDF, long supersetDF, long label) { + return new SignificantLongTerms.Bucket(subsetDF, supersetDF, label, InternalAggregations.EMPTY, DocValueFormat.RAW, 0); } } From fc266e5ea9e9c83c16f006f8d53ad481530273aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Tue, 3 Dec 2024 07:50:18 +0100 Subject: [PATCH 360/386] [Profiling] Switch to 19Hz sampling frequency (#117757) * [Profiling] Switch to 19Hz sampling frequency * Fix internalClusterTest --- .../xpack/profiling/action/GetStackTracesActionIT.java | 8 ++++---- .../xpack/profiling/action/CO2Calculator.java | 2 +- .../xpack/profiling/action/CostCalculator.java | 2 +- .../xpack/profiling/action/CO2CalculatorTests.java | 4 ++-- .../xpack/profiling/action/CostCalculatorTests.java | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java index 6463cda554e5b..4b3a4fb0108f7 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStackTracesActionIT.java @@ -46,8 +46,8 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals(18, stackTrace.fileIds.length); assertEquals(18, stackTrace.frameIds.length); assertEquals(18, stackTrace.typeIds.length); - assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); - assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + assertEquals(0.0000051026469d, stackTrace.annualCO2Tons, 0.0000000001d); + assertEquals(0.19825d, stackTrace.annualCostsUSD, 0.00001d); // not determined by default assertNull(stackTrace.subGroups); @@ -91,8 +91,8 @@ public void testGetStackTracesGroupedByServiceName() throws Exception { assertEquals(18, stackTrace.fileIds.length); assertEquals(18, stackTrace.frameIds.length); assertEquals(18, stackTrace.typeIds.length); - assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); - assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); + assertEquals(0.0000051026469d, stackTrace.annualCO2Tons, 0.0000000001d); + assertEquals(0.19825d, stackTrace.annualCostsUSD, 0.00001d); assertEquals(Long.valueOf(2L), stackTrace.subGroups.getCount("basket")); assertNotNull(response.getStackFrames()); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java index fbd5f7a9b5328..0a05fc5930942 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java @@ -12,7 +12,7 @@ import java.util.Map; final class CO2Calculator { - private static 
final double DEFAULT_SAMPLING_FREQUENCY = 20.0d; + private static final double DEFAULT_SAMPLING_FREQUENCY = 19.0d; private static final double DEFAULT_CO2_TONS_PER_KWH = 0.000379069d; // unit: metric tons / kWh private static final double DEFAULT_KILOWATTS_PER_CORE_X86 = 7.0d / 1000.0d; // unit: watt / core private static final double DEFAULT_KILOWATTS_PER_CORE_ARM64 = 2.8d / 1000.0d; // unit: watt / core diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java index b8ee54f5f29e8..05b51adb6a52f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CostCalculator.java @@ -10,7 +10,7 @@ import java.util.Map; final class CostCalculator { - private static final double DEFAULT_SAMPLING_FREQUENCY = 20.0d; + private static final double DEFAULT_SAMPLING_FREQUENCY = 19.0d; private static final double SECONDS_PER_HOUR = 60 * 60; private static final double SECONDS_PER_YEAR = SECONDS_PER_HOUR * 24 * 365.0d; // unit: seconds public static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: USD / (core * hour) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java index ff698465a56c5..9be98fbe4f46b 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CO2CalculatorTests.java @@ -73,7 +73,7 @@ public void testCreateFromRegularSource() { double samplingDurationInSeconds = 1_800.0d; // 30 minutes long samples = 100_000L; // 100k samples - double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d); + double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 19.0d); CO2Calculator co2Calculator = new CO2Calculator(hostsTable, samplingDurationInSeconds, null, null, null, null); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_A, samples), annualCoreHours, 1.135d, 0.0002786d, 7.0d); @@ -110,7 +110,7 @@ public void testCreateFromMalformedSource() { double samplingDurationInSeconds = 1_800.0d; // 30 minutes long samples = 100_000L; // 100k samples - double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d); + double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 19.0d); CO2Calculator co2Calculator = new CO2Calculator(hostsTable, samplingDurationInSeconds, null, null, null, null); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_A, samples), annualCoreHours, 1.135d, 0.0002786d, 7.0d); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java index eaf6cf618eddb..1c719c97164dc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/CostCalculatorTests.java @@ -63,7 +63,7 @@ public void testCreateFromRegularSource() { double 
samplingDurationInSeconds = 1_800.0d; // 30 minutes
         long samples = 100_000L; // 100k samples
 
-        double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d);
+        double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 19.0d);
         CostCalculator costCalculator = new CostCalculator(hostsTable, samplingDurationInSeconds, null, null, null);
 
         // Checks whether the cost calculation is based on the lookup data.

From 564e13e2ba49ac78c8c142f9b29481e56c498c83 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tim=20R=C3=BChsen?=
Date: Tue, 3 Dec 2024 08:56:20 +0100
Subject: [PATCH 361/386] [Profiling] Add field
 profiling.agent.config.sampling_frequency to profiling-hosts (#117752)

---
 .../profiling/component-template/profiling-hosts.json | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json
index e58a3cbd39f97..50f3ab6bf9a08 100644
--- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json
+++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json
@@ -135,6 +135,9 @@
       },
       "config.present_cpu_cores": {
         "type": "integer"
+      },
+      "config.sampling_frequency": {
+        "type": "integer"
       }
     }
   },

From cbb08babdbd7d8f42426df7984caa2d587b26ff7 Mon Sep 17 00:00:00 2001
From: Dimitris Rempapis
Date: Tue, 3 Dec 2024 10:52:14 +0200
Subject: [PATCH 362/386] Remove RestApiVersion#V_7 references for 9.0.0
 (#117572)

Address and remove references to org.elasticsearch.core.RestApiVersion#V_7
from the search-related code.
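For context, the v7-only hooks this patch deletes all follow one pattern; the snippet below is condensed from the removed `TypeQueryV7Builder` (the full deletion appears in the diff that follows), so it is an excerpt rather than a complete class. Under the v7 compatible REST API the `ParseField` was still registered, and parsing it logged a critical deprecation and then failed with the removal message:

```java
// Condensed from the deleted TypeQueryV7Builder; not a complete class.
public static final ParseField NAME_V7 = new ParseField("type")
    .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7));

public static TypeQueryV7Builder fromXContent(XContentParser parser) throws IOException {
    deprecationLogger.compatibleCritical("type_query", TYPES_DEPRECATION_MESSAGE);
    throw new ParsingException(parser.getTokenLocation(), TYPES_DEPRECATION_MESSAGE);
}
```

With the v7 REST compatibility layer gone in 9.0.0, these registrations and their wiring in `SearchModule` can be removed outright.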
--- docs/changelog/117572.yaml | 5 + .../action/search/MultiSearchRequest.java | 27 +---- .../termvectors/TermVectorsRequest.java | 6 - .../index/query/CommonTermsQueryBuilder.java | 72 ------------ .../index/query/TypeQueryV7Builder.java | 108 ------------------ .../document/RestTermVectorsAction.java | 1 - .../action/search/RestMultiSearchAction.java | 3 - .../elasticsearch/search/SearchModule.java | 26 ----- .../MovAvgPipelineAggregationBuilder.java | 85 -------------- .../search/sort/FieldSortBuilder.java | 8 -- .../search/sort/ScriptSortBuilder.java | 9 -- .../search/sort/SortBuilder.java | 8 -- .../search/MultiSearchRequestTests.java | 30 ++--- 13 files changed, 20 insertions(+), 368 deletions(-) create mode 100644 docs/changelog/117572.yaml delete mode 100644 server/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java delete mode 100644 server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java delete mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java diff --git a/docs/changelog/117572.yaml b/docs/changelog/117572.yaml new file mode 100644 index 0000000000000..a4a2ef6c06f5d --- /dev/null +++ b/docs/changelog/117572.yaml @@ -0,0 +1,5 @@ +pr: 117572 +summary: Address and remove any references of RestApiVersion version 7 +area: Search +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 8467ee6fd86f3..2022180475529 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -18,11 +18,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.rest.action.search.RestMultiSearchAction; -import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -51,10 +47,6 @@ * A multi search API request. 
*/ public class MultiSearchRequest extends ActionRequest implements CompositeIndicesRequest { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestSearchAction.class); - public static final String FIRST_LINE_EMPTY_DEPRECATION_MESSAGE = - "support for empty first line before any action metadata in msearch API is deprecated " - + "and will be removed in the next major version"; public static final int MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT = 0; private int maxConcurrentSearchRequests = 0; @@ -213,12 +205,6 @@ public static void readMultiLineFormat( if (nextMarker == -1) { break; } - // support first line with \n - if (parserConfig.restApiVersion() == RestApiVersion.V_7 && nextMarker == 0) { - deprecationLogger.compatibleCritical("msearch_first_line_empty", FIRST_LINE_EMPTY_DEPRECATION_MESSAGE); - from = nextMarker + 1; - continue; - } SearchRequest searchRequest = new SearchRequest(); if (indices != null) { @@ -281,14 +267,11 @@ public static void readMultiLineFormat( allowNoIndices = value; } else if ("ignore_throttled".equals(entry.getKey()) || "ignoreThrottled".equals(entry.getKey())) { ignoreThrottled = value; - } else if (parserConfig.restApiVersion() == RestApiVersion.V_7 - && ("type".equals(entry.getKey()) || "types".equals(entry.getKey()))) { - deprecationLogger.compatibleCritical("msearch_with_types", RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } else if (extraParamParser.apply(entry.getKey(), value, searchRequest)) { - // Skip, the parser handled the key/value - } else { - throw new IllegalArgumentException("key [" + entry.getKey() + "] is not supported in the metadata section"); - } + } else if (extraParamParser.apply(entry.getKey(), value, searchRequest)) { + // Skip, the parser handled the key/value + } else { + throw new IllegalArgumentException("key [" + entry.getKey() + "] is not supported in the metadata section"); + } } defaultOptions = IndicesOptions.fromParameters( expandWildcards, diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index a36158d11b5b3..7a7b2afab75d1 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -20,13 +20,11 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.action.document.RestTermVectorsAction; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -52,7 +50,6 @@ // It's not possible to suppress teh warning at #realtime(boolean) at a method-level. 
@SuppressWarnings("unchecked") public final class TermVectorsRequest extends SingleShardRequest implements RealtimeRequest { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TermVectorsRequest.class); private static final ParseField INDEX = new ParseField("_index"); private static final ParseField ID = new ParseField("_id"); @@ -66,7 +63,6 @@ public final class TermVectorsRequest extends SingleShardRequest { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonTermsQueryBuilder.class); - public static final String COMMON_TERMS_QUERY_DEPRECATION_MSG = "Common Terms Query usage is not supported. " - + "Use [match] query which can efficiently skip blocks of documents if the total number of hits is not tracked."; - - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_RELEVANCE) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 - public static ParseField NAME_V7 = new ParseField("common").withAllDeprecated(COMMON_TERMS_QUERY_DEPRECATION_MSG) - .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("common_term_query is not meant to be serialized."); - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException {} - - @Override - protected Query doToQuery(SearchExecutionContext context) throws IOException { - return null; - } - - @Override - protected boolean doEquals(CommonTermsQueryBuilder other) { - return false; - } - - @Override - protected int doHashCode() { - return 0; - } - - @Override - public String getWriteableName() { - return null; - } - - public static CommonTermsQueryBuilder fromXContent(XContentParser parser) throws IOException { - deprecationLogger.compatibleCritical("common_term_query", COMMON_TERMS_QUERY_DEPRECATION_MSG); - throw new ParsingException(parser.getTokenLocation(), COMMON_TERMS_QUERY_DEPRECATION_MSG); - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; - } -} diff --git a/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java b/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java deleted file mode 100644 index c9aae0195acf7..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/query/TypeQueryV7Builder.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -@UpdateForV9(owner = UpdateForV9.Owner.SEARCH_RELEVANCE) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 -public class TypeQueryV7Builder extends AbstractQueryBuilder { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeQueryV7Builder.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Type queries are deprecated, " - + "prefer to filter on a field instead."; - - private static final String NAME = "type"; - public static final ParseField NAME_V7 = new ParseField(NAME).forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); - private static final ParseField VALUE_FIELD = new ParseField("value"); - private static final ObjectParser PARSER = new ObjectParser<>(NAME, TypeQueryV7Builder::new); - - static { - PARSER.declareString( - QueryBuilder::queryName, - AbstractQueryBuilder.NAME_FIELD.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)) - ); - PARSER.declareFloat( - QueryBuilder::boost, - AbstractQueryBuilder.BOOST_FIELD.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)) - ); - PARSER.declareString(TypeQueryV7Builder::setValue, VALUE_FIELD.forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7))); - } - - private String value; - - public TypeQueryV7Builder() {} - - /** - * Read from a stream. 
- */ - public TypeQueryV7Builder(StreamInput in) throws IOException { - super(in); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException {} - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(VALUE_FIELD.getPreferredName(), MapperService.SINGLE_MAPPING_NAME); - printBoostAndQueryName(builder); - builder.endObject(); - } - - @Override - protected Query doToQuery(SearchExecutionContext context) throws IOException { - return new MatchNoDocsQuery(); - } - - @Override - protected boolean doEquals(TypeQueryV7Builder other) { - return true; - } - - @Override - protected int doHashCode() { - return 0; - } - - public static TypeQueryV7Builder fromXContent(XContentParser parser) throws IOException { - deprecationLogger.compatibleCritical("type_query", TYPES_DEPRECATION_MESSAGE); - throw new ParsingException(parser.getTokenLocation(), TYPES_DEPRECATION_MESSAGE); - } - - @Override - public String getWriteableName() { - return NAME; - } - - public void setValue(String value) { - this.value = value; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; - } -} diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index 8e41e1cd09674..d2b09af8e1f3d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -35,7 +35,6 @@ */ @ServerlessScope(Scope.PUBLIC) public class RestTermVectorsAction extends BaseRestHandler { - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in term vector requests is deprecated."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 89775b4ca8e15..24fab92ced392 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -43,9 +43,6 @@ @ServerlessScope(Scope.PUBLIC) public class RestMultiSearchAction extends BaseRestHandler { - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying types in multi search template requests is deprecated."; - private static final Set RESPONSE_PARAMS = Set.of(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM); private final boolean allowExplicitIndex; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 09e25350ad4fd..d282ba425b126 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -20,12 +20,10 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.CombinedFieldsQueryBuilder; -import 
org.elasticsearch.index.query.CommonTermsQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; @@ -68,7 +66,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.query.TermsSetQueryBuilder; -import org.elasticsearch.index.query.TypeQueryV7Builder; import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.index.query.functionscore.ExponentialDecayFunctionBuilder; @@ -204,7 +201,6 @@ import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; @@ -686,15 +682,6 @@ private ValuesSourceRegistry registerAggregations(List plugins) { .setAggregatorRegistrar(CompositeAggregationBuilder::registerAggregators), builder ); - if (RestApiVersion.minimumSupported() == RestApiVersion.V_7) { - registerQuery( - new QuerySpec<>( - CommonTermsQueryBuilder.NAME_V7, - (streamInput) -> new CommonTermsQueryBuilder(), - CommonTermsQueryBuilder::fromXContent - ) - ); - } registerFromPlugin(plugins, SearchPlugin::getAggregations, (agg) -> this.registerAggregation(agg, builder)); @@ -815,15 +802,6 @@ private void registerPipelineAggregations(List plugins) { SerialDiffPipelineAggregationBuilder::parse ) ); - if (RestApiVersion.minimumSupported() == RestApiVersion.V_7) { - registerPipelineAggregation( - new PipelineAggregationSpec( - MovAvgPipelineAggregationBuilder.NAME_V7, - MovAvgPipelineAggregationBuilder::new, - MovAvgPipelineAggregationBuilder.PARSER - ) - ); - } registerFromPlugin(plugins, SearchPlugin::getPipelineAggregations, this::registerPipelineAggregation); } @@ -1203,10 +1181,6 @@ private void registerQueryParsers(List plugins) { })); registerFromPlugin(plugins, SearchPlugin::getQueries, this::registerQuery); - - if (RestApiVersion.minimumSupported() == RestApiVersion.V_7) { - registerQuery(new QuerySpec<>(TypeQueryV7Builder.NAME_V7, TypeQueryV7Builder::new, TypeQueryV7Builder::fromXContent)); - } } private void registerIntervalsSourceProviders() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java deleted file mode 100644 index 068487317dfe5..0000000000000 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.search.aggregations.pipeline; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.query.CommonTermsQueryBuilder; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Map; - -/** - * The actual moving_avg aggregation was removed as a breaking change in 8.0. This class exists to provide a friendlier error message - * if somebody attempts to use the moving_avg aggregation via the compatible-with=7 mechanism. - * - * We can remove this class entirely when v7 rest api compatibility is dropped. - * - * @deprecated Only for 7.x rest compat - */ -@UpdateForV9(owner = UpdateForV9.Owner.SEARCH_ANALYTICS) // remove this since it's only for 7.x compat and 7.x compat will be removed in 9.0 -@Deprecated -public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonTermsQueryBuilder.class); - public static final String MOVING_AVG_AGG_DEPRECATION_MSG = "Moving Average aggregation usage is not supported. 
" - + "Use the [moving_fn] aggregation instead."; - - public static final ParseField NAME_V7 = new ParseField("moving_avg").withAllDeprecated(MOVING_AVG_AGG_DEPRECATION_MSG) - .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); - - public static final ContextParser PARSER = (parser, name) -> { - deprecationLogger.compatibleCritical("moving_avg_aggregation", MOVING_AVG_AGG_DEPRECATION_MSG); - throw new ParsingException(parser.getTokenLocation(), MOVING_AVG_AGG_DEPRECATION_MSG); - }; - - public MovAvgPipelineAggregationBuilder(StreamInput in) throws IOException { - super(in, NAME_V7.getPreferredName()); - throw new UnsupportedOperationException("moving_avg is not meant to be used."); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("moving_avg is not meant to be used."); - } - - @Override - protected PipelineAggregator createInternal(Map metadata) { - throw new UnsupportedOperationException("moving_avg is not meant to be used."); - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedOperationException("moving_avg is not meant to be used."); - } - - @Override - protected void validate(ValidationContext context) { - throw new UnsupportedOperationException("moving_avg is not meant to be used."); - } - - @Override - public final String getWriteableName() { - return null; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ZERO; - } -} diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index cd597f3328c0f..5691435c83ecb 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.time.DateMathParser; @@ -729,13 +728,6 @@ public static FieldSortBuilder fromXContent(XContentParser parser, String fieldN PARSER.declareObject(FieldSortBuilder::setNestedSort, (p, c) -> NestedSortBuilder.fromXContent(p), NESTED_FIELD); PARSER.declareString(FieldSortBuilder::setNumericType, NUMERIC_TYPE); PARSER.declareString(FieldSortBuilder::setFormat, FORMAT); - PARSER.declareField((b, v) -> {}, (p, c) -> { - throw new ParsingException(p.getTokenLocation(), "[nested_path] has been removed in favour of the [nested] parameter", c); - }, NESTED_PATH_FIELD, ValueType.STRING); - - PARSER.declareObject((b, v) -> {}, (p, c) -> { - throw new ParsingException(p.getTokenLocation(), "[nested_filter] has been removed in favour of the [nested] parameter", c); - }, NESTED_FILTER_FIELD); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 48773eec8371b..445c55dc546bc 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -17,7 +17,6 @@ import org.apache.lucene.util.BytesRefBuilder; import 
org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -220,14 +219,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)), ORDER_FIELD); PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORTMODE_FIELD); PARSER.declareObject(ScriptSortBuilder::setNestedSort, (p, c) -> NestedSortBuilder.fromXContent(p), NESTED_FIELD); - - PARSER.declareObject((b, v) -> {}, (p, c) -> { - throw new ParsingException(p.getTokenLocation(), "[nested_path] has been removed in favour of the [nested] parameter", c); - }, NESTED_PATH_FIELD); - - PARSER.declareObject((b, v) -> {}, (p, c) -> { - throw new ParsingException(p.getTokenLocation(), "[nested_filter] has been removed in favour of the [nested] parameter", c); - }, NESTED_FILTER_FIELD); } /** diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 5832b93b9462f..4a8cdbcdffa55 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -18,8 +18,6 @@ import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.query.QueryBuilder; @@ -52,12 +50,6 @@ public abstract class SortBuilder> // parse fields common to more than one SortBuilder public static final ParseField ORDER_FIELD = new ParseField("order"); - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // v7 REST API no longer exists: eliminate ref to RestApiVersion.V_7 - public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter").withAllDeprecated() - .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); - public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path").withAllDeprecated() - .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7)); - private static final Map> PARSERS = Map.of( ScriptSortBuilder.NAME, ScriptSortBuilder::fromXContent, diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index f2bc561792991..9f81b999c9d98 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.CheckedRunnable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.RestRequest; @@ -268,12 +267,12 @@ public void testMsearchTerminatedByNewline() throws Exception { assertEquals(3, 
msearchRequest.requests().size()); } - private MultiSearchRequest parseMultiSearchRequestFromString(String request, RestApiVersion restApiVersion) throws IOException { - return parseMultiSearchRequest(createRestRequest(request.getBytes(StandardCharsets.UTF_8), restApiVersion)); + private MultiSearchRequest parseMultiSearchRequestFromString(String request) throws IOException { + return parseMultiSearchRequest(createRestRequest(request.getBytes(StandardCharsets.UTF_8))); } private MultiSearchRequest parseMultiSearchRequest(String sample) throws IOException { - return parseMultiSearchRequest(createRestRequest(sample, null)); + return parseMultiSearchRequest(createRestRequest(sample)); } private MultiSearchRequest parseMultiSearchRequest(RestRequest restRequest) throws IOException { @@ -288,22 +287,13 @@ private MultiSearchRequest parseMultiSearchRequest(RestRequest restRequest) thro return request; } - private RestRequest createRestRequest(String sample, RestApiVersion restApiVersion) throws IOException { + private RestRequest createRestRequest(String sample) throws IOException { byte[] data = StreamsUtils.copyToBytesFromClasspath(sample); - return createRestRequest(data, restApiVersion); + return createRestRequest(data); } - private FakeRestRequest createRestRequest(byte[] data, RestApiVersion restApiVersion) { - if (restApiVersion != null) { - final List contentTypeHeader = Collections.singletonList( - compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_7) - ); - return new FakeRestRequest.Builder(xContentRegistry()).withHeaders( - Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader) - ).withContent(new BytesArray(data), null).build(); - } else { - return new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data), XContentType.JSON).build(); - } + private FakeRestRequest createRestRequest(byte[] data) { + return new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data), XContentType.JSON).build(); } @Override @@ -517,7 +507,7 @@ public void testFailOnExtraCharacters() throws IOException { parseMultiSearchRequestFromString(""" {"index": "test"}{{{{{extra chars that shouldn't be here { "query": {"match_all": {}}} - """, null); + """); fail("should have caught first line; extra open brackets"); } catch (XContentParseException e) { assertEquals("[1:18] Unexpected token after end of object", e.getMessage()); @@ -526,7 +516,7 @@ public void testFailOnExtraCharacters() throws IOException { parseMultiSearchRequestFromString(""" {"index": "test"} { "query": {"match_all": {}}}{{{{even more chars - """, null); + """); fail("should have caught second line"); } catch (XContentParseException e) { assertEquals("[1:30] Unexpected token after end of object", e.getMessage()); @@ -535,7 +525,7 @@ public void testFailOnExtraCharacters() throws IOException { parseMultiSearchRequestFromString(""" {} { "query": {"match_all": {}}}}}}different error message - """, null); + """); fail("should have caught second line; extra closing brackets"); } catch (XContentParseException e) { assertThat( From a514aad3c2da305b0b63d8545cab75bb2c2d3032 Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Tue, 3 Dec 2024 10:58:20 +0200 Subject: [PATCH 363/386] Fix/meta fields bad request (#117229) A 400 rather than a 5xx error is returned when _source / _seq_no / _feature / _nested_path / _field_names is requested via fields --- docs/changelog/117229.yaml | 6 ++ .../extras/RankFeatureMetaFieldMapper.java | 2 +- rest-api-spec/build.gradle | 1 +
.../test/search/520_fetch_fields.yml | 80 +++++++++++++++++-- .../index/mapper/FieldNamesFieldMapper.java | 2 +- .../index/mapper/MapperFeatures.java | 5 +- .../index/mapper/NestedPathFieldMapper.java | 2 +- .../index/mapper/SeqNoFieldMapper.java | 2 +- .../index/mapper/SourceFieldMapper.java | 2 +- .../fetch/subphase/FieldFetcherTests.java | 2 +- 10 files changed, 92 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/117229.yaml diff --git a/docs/changelog/117229.yaml b/docs/changelog/117229.yaml new file mode 100644 index 0000000000000..f1b859c03e4fa --- /dev/null +++ b/docs/changelog/117229.yaml @@ -0,0 +1,6 @@ +pr: 117229 +summary: "In this PR, a 400 error is returned when _source / _seq_no / _feature /\ + \ _nested_path / _field_names is requested, rather than a 5xx" +area: Search +type: bug +issues: [] diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java index 15398b1f178ee..ed1cc57b84863 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureMetaFieldMapper.java @@ -48,7 +48,7 @@ public String typeName() { @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "]."); + throw new IllegalArgumentException("Cannot fetch values for internal field [" + typeName() + "]."); } @Override diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 650d17e41de7f..e2af894eb0939 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -66,4 +66,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode") + task.skipTest("search/520_fetch_fields/fetch _seq_no via fields", "error code is changed from 5xx to 400 in 9.0") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml index 2b309f502f0c2..9a43199755d75 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml @@ -128,18 +128,88 @@ fetch _seq_no via stored_fields: --- fetch _seq_no via fields: + - requires: + cluster_features: ["meta_fetch_fields_error_code_changed"] + reason: The fields_api returns a 400 instead of a 5xx when _seq_no is requested via fields - do: - catch: "request" + catch: bad_request search: index: test body: fields: [ _seq_no ] - # This should be `unauthorized` (401) or `forbidden` (403) or at least `bad request` (400) - # while instead it is mapped to an `internal_server_error (500)` - - match: { status: 500 } - - match: { error.root_cause.0.type: unsupported_operation_exception } + - match: { status: 400 } + - match: { error.root_cause.0.type: illegal_argument_exception } + - match: {
error.root_cause.0.reason: "error fetching [_seq_no]: Cannot fetch values for internal field [_seq_no]." } + +--- +fetch _source via fields: + - requires: + cluster_features: ["meta_fetch_fields_error_code_changed"] + reason: The fields_api returns a 400 instead of a 5xx when _source is requested via fields + + - do: + catch: bad_request + search: + index: test + body: + fields: [ _source ] + + - match: { status: 400 } + - match: { error.root_cause.0.type: illegal_argument_exception } + - match: { error.root_cause.0.reason: "error fetching [_source]: Cannot fetch values for internal field [_source]." } + +--- +fetch _feature via fields: + - requires: + cluster_features: ["meta_fetch_fields_error_code_changed"] + reason: The fields_api returns a 400 instead of a 5xx when _feature is requested via fields + + - do: + catch: bad_request + search: + index: test + body: + fields: [ _feature ] + + - match: { status: 400 } + - match: { error.root_cause.0.type: illegal_argument_exception } + - match: { error.root_cause.0.reason: "error fetching [_feature]: Cannot fetch values for internal field [_feature]." } + +--- +fetch _nested_path via fields: + - requires: + cluster_features: ["meta_fetch_fields_error_code_changed"] + reason: The fields_api returns a 400 instead of a 5xx when _nested_path is requested via fields + + - do: + catch: bad_request + search: + index: test + body: + fields: [ _nested_path ] + + - match: { status: 400 } + - match: { error.root_cause.0.type: illegal_argument_exception } + - match: { error.root_cause.0.reason: "error fetching [_nested_path]: Cannot fetch values for internal field [_nested_path]." } + +--- +fetch _field_names via fields: + - requires: + cluster_features: ["meta_fetch_fields_error_code_changed"] + reason: The fields_api returns a 400 instead of a 5xx when _field_names is requested via fields + + - do: + catch: bad_request + search: + index: test + body: + fields: [ _field_names ] + + - match: { status: 400 } + - match: { error.root_cause.0.type: illegal_argument_exception } + - match: { error.root_cause.0.reason: "error fetching [_field_names]: Cannot fetch values for internal field [_field_names]."
} --- fetch fields with none stored_fields: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 565b1ff28a39f..425e3c664c262 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -135,7 +135,7 @@ public boolean isEnabled() { @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + throw new IllegalArgumentException("Cannot fetch values for internal field [" + name() + "]."); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 333c37381c587..bf6c729f95653 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -61,6 +61,8 @@ public Set getFeatures() { "mapper.constant_keyword.synthetic_source_write_fix" ); + public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); + @Override public Set getTestFeatures() { return Set.of( @@ -71,7 +73,8 @@ public Set getTestFeatures() { IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS, MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT, - CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX + CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, + META_FETCH_FIELDS_ERROR_CODE_CHANGED ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java index b22c3a12fcda3..1cd752dc34403 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java @@ -67,7 +67,7 @@ public Query existsQuery(SearchExecutionContext context) { @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + throw new IllegalArgumentException("Cannot fetch values for internal field [" + name() + "]."); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index e126102b0f3c2..66ee42dfc56f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ -168,7 +168,7 @@ public boolean mayExistInIndex(SearchExecutionContext context) { @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + throw new IllegalArgumentException("Cannot fetch values for internal field [" + name() + "]."); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index b97e04fcddb5d..1cea8154aad43 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -325,7 +325,7 @@ public String typeName() { @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "]."); + throw new IllegalArgumentException("Cannot fetch values for internal field [" + name() + "]."); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index f01f760ed71c3..c5f1efe561c22 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -271,7 +271,7 @@ public void testMetadataFields() throws IOException { FieldNamesFieldMapper.NAME, NestedPathFieldMapper.name(IndexVersion.current()) )) { - expectThrows(UnsupportedOperationException.class, () -> fetchFields(mapperService, source, fieldname)); + expectThrows(IllegalArgumentException.class, () -> fetchFields(mapperService, source, fieldname)); } } From b1412f65b90893c3d29756c921c32d39f3172a65 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 3 Dec 2024 10:57:05 +0100 Subject: [PATCH 364/386] Clean up search timeout handling code (#116678) TimeExceededException was made public so that it could be caught outside of the search.internal package. That is rather dangerous, because we really need it to be created only from `ContextIndexSearcher#throwTimeExceededException`. This commit makes its constructor private to prevent it from being created outside of ContextIndexSearcher. It also adds javadocs around that. I took the chance to also share the timeout handling code that is now copy-pasted in different places.
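As additional context for reviewers, the shape of the consolidation is sketched below. This is a minimal standalone sketch, not the actual implementation: Result and TimeoutException are simplified stand-ins for QuerySearchResult and SearchTimeoutException, and only the handleTimeout logic mirrors the helper introduced by the hunks that follow.

// Sketch of the shared timeout handling: one helper decides between failing
// the request and flagging the result so partial hits can be returned.
final class TimeoutHandlingSketch {

    // Simplified stand-in for QuerySearchResult: only the timed-out flag matters here.
    static final class Result {
        private boolean timedOut;
        void searchTimedOut(boolean value) { this.timedOut = value; }
        boolean isTimedOut() { return timedOut; }
    }

    // Simplified stand-in for SearchTimeoutException.
    static final class TimeoutException extends RuntimeException {
        TimeoutException(String message) { super(message); }
    }

    // The consolidated decision point: either fail the whole request, or mark the
    // result as timed out and let the caller return whatever was collected so far.
    static void handleTimeout(boolean allowPartialSearchResults, Result result) {
        if (allowPartialSearchResults == false) {
            throw new TimeoutException("Time exceeded");
        }
        result.searchTimedOut(true);
    }

    public static void main(String[] args) {
        Result partial = new Result();
        handleTimeout(true, partial);             // partial results allowed: flag is set
        System.out.println(partial.isTimedOut()); // true
        try {
            handleTimeout(false, new Result());   // partial results disallowed: throws
        } catch (TimeoutException e) {
            System.out.println(e.getMessage());   // Time exceeded
        }
    }
}

Centralizing this branch keeps the query, fetch and rescore phases from drifting apart in how they honor allow_partial_search_results.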
--- .../search/fetch/FetchPhase.java | 7 +--- .../search/fetch/FetchPhaseDocsIterator.java | 41 +++++++++---------- .../search/internal/ContextIndexSearcher.java | 18 +++++--- .../search/query/QueryPhase.java | 9 ++-- .../search/query/SearchTimeoutException.java | 13 ++++++ .../search/rescore/RescorePhase.java | 9 ++-- .../fetch/FetchPhaseDocsIteratorTests.java | 8 +++- 7 files changed, 63 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 546586a9ff3c3..2fbe3c1fc1532 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -195,13 +195,10 @@ protected SearchHit nextDoc(int doc) throws IOException { context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad, - context.request().allowPartialSearchResults() + context.request().allowPartialSearchResults(), + context.queryResult() ); - if (docsIterator.isTimedOut()) { - context.queryResult().searchTimedOut(true); - } - if (context.isCancelled()) { for (SearchHit hit : hits) { // release all hits that would otherwise become owned and eventually released by SearchHits below diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index df4e7649ffd3b..4a242f70e8d02 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -16,6 +16,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.SearchTimeoutException; import java.io.IOException; @@ -30,12 +31,6 @@ */ abstract class FetchPhaseDocsIterator { - private boolean timedOut = false; - - public boolean isTimedOut() { - return timedOut; - } - /** * Called when a new leaf reader is reached * @param ctx the leaf reader for this set of doc ids @@ -53,7 +48,13 @@ public boolean isTimedOut() { /** * Iterate over a set of docsIds within a particular shard and index reader */ - public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader indexReader, int[] docIds, boolean allowPartialResults) { + public final SearchHit[] iterate( + SearchShardTarget shardTarget, + IndexReader indexReader, + int[] docIds, + boolean allowPartialResults, + QuerySearchResult querySearchResult + ) { SearchHit[] searchHits = new SearchHit[docIds.length]; DocIdToIndex[] docs = new DocIdToIndex[docIds.length]; for (int index = 0; index < docIds.length; index++) { @@ -69,12 +70,10 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde int[] docsInLeaf = docIdsInLeaf(0, endReaderIdx, docs, ctx.docBase); try { setNextReader(ctx, docsInLeaf); - } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { - if (allowPartialResults) { - timedOut = true; - return SearchHits.EMPTY; - } - throw new SearchTimeoutException(shardTarget, "Time exceeded"); + } catch (ContextIndexSearcher.TimeExceededException e) { + SearchTimeoutException.handleTimeout(allowPartialResults, shardTarget, querySearchResult); + assert allowPartialResults; + return SearchHits.EMPTY; } for (int i = 0; i < docs.length; i++) 
{ try { @@ -88,15 +87,15 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde currentDoc = docs[i].docId; assert searchHits[docs[i].index] == null; searchHits[docs[i].index] = nextDoc(docs[i].docId); - } catch (ContextIndexSearcher.TimeExceededException timeExceededException) { - if (allowPartialResults) { - timedOut = true; - SearchHit[] partialSearchHits = new SearchHit[i]; - System.arraycopy(searchHits, 0, partialSearchHits, 0, i); - return partialSearchHits; + } catch (ContextIndexSearcher.TimeExceededException e) { + if (allowPartialResults == false) { + purgeSearchHits(searchHits); } - purgeSearchHits(searchHits); - throw new SearchTimeoutException(shardTarget, "Time exceeded"); + SearchTimeoutException.handleTimeout(allowPartialResults, shardTarget, querySearchResult); + assert allowPartialResults; + SearchHit[] partialSearchHits = new SearchHit[i]; + System.arraycopy(searchHits, 0, partialSearchHits, 0, i); + return partialSearchHits; } } } catch (SearchTimeoutException e) { diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 78d90377cdc3f..9f990fbd97cdf 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -169,8 +169,8 @@ public void setProfiler(QueryProfiler profiler) { * Add a {@link Runnable} that will be run on a regular basis while accessing documents in the * DirectoryReader but also while collecting them and check for query cancellation or timeout. */ - public Runnable addQueryCancellation(Runnable action) { - return this.cancellable.add(action); + public void addQueryCancellation(Runnable action) { + this.cancellable.add(action); } /** @@ -425,8 +425,16 @@ public void throwTimeExceededException() { } } - public static class TimeExceededException extends RuntimeException { + /** + * Exception thrown whenever a search timeout occurs. May be thrown by {@link ContextIndexSearcher} or {@link ExitableDirectoryReader}. 
+ */ + public static final class TimeExceededException extends RuntimeException { // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught + + /** + * Created via {@link #throwTimeExceededException()} + */ + private TimeExceededException() {} } @Override @@ -570,14 +578,12 @@ public DirectoryReader getDirectoryReader() { } private static class MutableQueryTimeout implements ExitableDirectoryReader.QueryCancellation { - private final List runnables = new ArrayList<>(); - private Runnable add(Runnable action) { + private void add(Runnable action) { Objects.requireNonNull(action, "cancellation runnable should not be null"); assert runnables.contains(action) == false : "Cancellation runnable already added"; runnables.add(action); - return action; } private void remove(Runnable action) { diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index af65c30b49dcf..3036a295d459a 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -217,10 +217,11 @@ static void addCollectorsAndSearch(SearchContext searchContext) throws QueryPhas queryResult.topDocs(queryPhaseResult.topDocsAndMaxScore(), queryPhaseResult.sortValueFormats()); if (searcher.timeExceeded()) { assert timeoutRunnable != null : "TimeExceededException thrown even though timeout wasn't set"; - if (searchContext.request().allowPartialSearchResults() == false) { - throw new SearchTimeoutException(searchContext.shardTarget(), "Time exceeded"); - } - queryResult.searchTimedOut(true); + SearchTimeoutException.handleTimeout( + searchContext.request().allowPartialSearchResults(), + searchContext.shardTarget(), + searchContext.queryResult() + ); } if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER) { queryResult.terminatedEarly(queryPhaseResult.terminatedAfter()); diff --git a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java index 0ed64811fee28..e006f176ff91a 100644 --- a/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java @@ -33,4 +33,17 @@ public SearchTimeoutException(StreamInput in) throws IOException { public RestStatus status() { return RestStatus.GATEWAY_TIMEOUT; } + + /** + * Propagate a timeout according to whether partial search results are allowed or not. + * In case partial results are allowed, a flag will be set on the provided {@link QuerySearchResult} to indicate that there was a + * timeout, but the execution will continue and partial results will be returned to the user. + * When partial results are disallowed, a {@link SearchTimeoutException} will be thrown and returned to the user.
+ */ + public static void handleTimeout(boolean allowPartialSearchResults, SearchShardTarget target, QuerySearchResult querySearchResult) { + if (allowPartialSearchResults == false) { + throw new SearchTimeoutException(target, "Time exceeded"); + } + querySearchResult.searchTimedOut(true); + } } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index 1227db5d8e1db..7e3646e7689cc 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -73,10 +73,11 @@ public static void execute(SearchContext context) { } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); } catch (ContextIndexSearcher.TimeExceededException e) { - if (context.request().allowPartialSearchResults() == false) { - throw new SearchTimeoutException(context.shardTarget(), "Time exceeded"); - } - context.queryResult().searchTimedOut(true); + SearchTimeoutException.handleTimeout( + context.request().allowPartialSearchResults(), + context.shardTarget(), + context.queryResult() + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index d5e930321db95..c8d1b6721c64b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -77,7 +78,7 @@ protected SearchHit nextDoc(int doc) { } }; - SearchHit[] hits = it.iterate(null, reader, docs, randomBoolean()); + SearchHit[] hits = it.iterate(null, reader, docs, randomBoolean(), new QuerySearchResult()); assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { @@ -125,7 +126,10 @@ protected SearchHit nextDoc(int doc) { } }; - Exception e = expectThrows(FetchPhaseExecutionException.class, () -> it.iterate(null, reader, docs, randomBoolean())); + Exception e = expectThrows( + FetchPhaseExecutionException.class, + () -> it.iterate(null, reader, docs, randomBoolean(), new QuerySearchResult()) + ); assertThat(e.getMessage(), containsString("Error running fetch phase for doc [" + badDoc + "]")); assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); From 76a382a78d728d90cc84fa3fbcfe61ba1c1e8db2 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 3 Dec 2024 12:24:55 +0100 Subject: [PATCH 365/386] ESQL: Enable CATEGORIZE tests on non-snapshot builds (#117881) --- .../org/elasticsearch/xpack/esql/action/EsqlCapabilities.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index a93590d7a5bc2..646c4f8240c3e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -407,7 +407,7 @@ public enum Cap { /** * Supported the 
text categorization function "CATEGORIZE". */ - CATEGORIZE_V4(Build.current().isSnapshot()), + CATEGORIZE_V4, /** * QSTR function From cf9687f56de49bf5f07152b70b388d3f971aa9a5 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 3 Dec 2024 13:08:02 +0100 Subject: [PATCH 366/386] ESQL: Fix layout when aggregating with aliases (#117837) Forward-port of #117832 Only really relevant for bwc with 8.11/8.12; port for consistency with 8.x --- .../planner/AbstractPhysicalOperationProviders.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 69e2d1c45aa3c..35aba7665ec87 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -120,10 +120,14 @@ public final PhysicalOperation groupingPhysicalOperation( * - before stats (keep x = a | stats by x) which requires the partial input to use a's channel * - after stats (stats by a | keep x = a) which causes the output layout to refer to the follow-up alias */ + // TODO: This is likely required only for pre-8.14 node compatibility; confirm and remove if possible. + // Since https://github.com/elastic/elasticsearch/pull/104958, it shouldn't be possible to have aliases in the aggregates + // which the groupings refer to. Except for `BY CATEGORIZE(field)`, which remains as an alias in the grouping, all aliases + // should've become EVALs before or after the STATS. for (NamedExpression agg : aggregates) { if (agg instanceof Alias a) { if (a.child() instanceof Attribute attr) { - if (groupAttribute.id().equals(attr.id())) { + if (sourceGroupAttribute.id().equals(attr.id())) { groupAttributeLayout.nameIds().add(a.id()); // TODO: investigate whether a break could be used since it shouldn't be possible to have multiple // attributes pointing to the same attribute @@ -133,8 +137,8 @@ public final PhysicalOperation groupingPhysicalOperation( // is in the output form // if the group points to an alias declared in the aggregate, use the alias child as source else if (aggregatorMode.isOutputPartial()) { - if (groupAttribute.semanticEquals(a.toAttribute())) { - groupAttribute = attr; + if (sourceGroupAttribute.semanticEquals(a.toAttribute())) { + sourceGroupAttribute = attr; break; } } From 2a9a3a44dc8bcf71659df5893ef23df535967eea Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 4 Dec 2024 00:13:04 +1100 Subject: [PATCH 367/386] Add a not-master state for desired balance (#116904) The new state prevents a long-running desired balance computation from setting its result after the node stands down as master.
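Before the diff, a short standalone sketch of the idea, assuming no familiarity with DesiredBalanceShardsAllocator: the balance is published through an AtomicReference guarded by a NOT_MASTER sentinel, so a computation that finishes after the node stood down cannot resurrect a stale balance. The names here (Balance, publish, onAllocate) are simplified stand-ins; only the two sentinel values and the compare-and-set loop mirror the actual change below.

import java.util.concurrent.atomic.AtomicReference;

// Sketch of sentinel-guarded publication of an asynchronously computed value.
final class SentinelPublishSketch {

    record Balance(long index) {}

    static final Balance NOT_MASTER = new Balance(-2);
    static final Balance BECOME_MASTER_INITIAL = new Balance(-1);

    private final AtomicReference<Balance> current = new AtomicReference<>(NOT_MASTER);

    // Allocation runs on the elected master only: swap the sentinel for the start value.
    void onAllocate() {
        current.compareAndSet(NOT_MASTER, BECOME_MASTER_INITIAL);
    }

    // Standing down: from now on, any in-flight computation result must be discarded.
    void onNoLongerMaster() {
        current.set(NOT_MASTER);
    }

    // Async computation publishing its result: refuses to overwrite the sentinel.
    boolean publish(Balance computed) {
        while (true) {
            Balance old = current.get();
            if (old == NOT_MASTER) {
                return false; // node stood down while computing: discard the result
            }
            if (current.compareAndSet(old, computed)) {
                return true;
            }
        }
    }

    public static void main(String[] args) {
        SentinelPublishSketch sketch = new SentinelPublishSketch();
        sketch.onAllocate();
        System.out.println(sketch.publish(new Balance(1))); // true: accepted
        sketch.onNoLongerMaster();
        System.out.println(sketch.publish(new Balance(2))); // false: discarded
    }
}

Using a dedicated NOT_MASTER sentinel, distinct from the become-master starting value, lets a slow computation tell a fresh master apart from a node that is no longer master, without any extra locking.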
--- docs/changelog/116904.yaml | 5 ++ .../allocation/allocator/DesiredBalance.java | 9 ++- .../DesiredBalanceShardsAllocator.java | 71 ++++++++++++++----- ...nsportDeleteDesiredBalanceActionTests.java | 2 +- .../DesiredBalanceComputerTests.java | 51 +++++++++---- .../DesiredBalanceShardsAllocatorTests.java | 13 ++-- 6 files changed, 112 insertions(+), 39 deletions(-) create mode 100644 docs/changelog/116904.yaml diff --git a/docs/changelog/116904.yaml b/docs/changelog/116904.yaml new file mode 100644 index 0000000000000..46fa445f36154 --- /dev/null +++ b/docs/changelog/116904.yaml @@ -0,0 +1,5 @@ +pr: 116904 +summary: Add a not-master state for desired balance +area: Allocation +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java index 6ad44fdf3a9c0..406ca72868a40 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalance.java @@ -40,7 +40,14 @@ public DesiredBalance(long lastConvergedIndex, Map ass this(lastConvergedIndex, assignments, Map.of(), ComputationFinishReason.CONVERGED); } - public static final DesiredBalance INITIAL = new DesiredBalance(-1, Map.of()); + /** + * The placeholder value for {@link DesiredBalance} when the node stands down as master. + */ + public static final DesiredBalance NOT_MASTER = new DesiredBalance(-2, Map.of()); + /** + * The starting value for {@link DesiredBalance} when the node becomes the master. + */ + public static final DesiredBalance BECOME_MASTER_INITIAL = new DesiredBalance(-1, Map.of()); public ShardAssignment getAssignment(ShardId shardId) { return assignments.get(shardId); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index 72261df658ca1..8408386b8da58 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.ClusterSettings; @@ -43,6 +44,7 @@ import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; /** * A {@link ShardsAllocator} which asynchronously refreshes the desired balance held by the {@link DesiredBalanceComputer} and then takes @@ -62,7 +64,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator { private final AtomicLong indexGenerator = new AtomicLong(-1); private final ConcurrentLinkedQueue> pendingDesiredBalanceMoves = new ConcurrentLinkedQueue<>(); private final MasterServiceTaskQueue masterServiceTaskQueue; - private volatile DesiredBalance currentDesiredBalance = DesiredBalance.INITIAL; + private final AtomicReference 
currentDesiredBalanceRef = new AtomicReference<>(DesiredBalance.NOT_MASTER); private volatile boolean resetCurrentDesiredBalance = false; private final Set processedNodeShutdowns = new HashSet<>(); private final DesiredBalanceMetrics desiredBalanceMetrics; @@ -129,6 +131,12 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { long index = desiredBalanceInput.index(); logger.debug("Starting desired balance computation for [{}]", index); + final DesiredBalance initialDesiredBalance = getInitialDesiredBalance(); + if (initialDesiredBalance == DesiredBalance.NOT_MASTER) { + logger.debug("Abort desired balance computation because node is no longer master"); + return; + } + recordTime( cumulativeComputationTime, // We set currentDesiredBalance back to INITIAL when the node stands down as master in onNoLongerMaster. @@ -137,7 +145,7 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { // lead to unexpected behaviours for tests. See also https://github.com/elastic/elasticsearch/pull/116904 () -> setCurrentDesiredBalance( desiredBalanceComputer.compute( - getInitialDesiredBalance(), + initialDesiredBalance, desiredBalanceInput, pendingDesiredBalanceMoves, this::isFresh @@ -146,7 +154,17 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { ); computationsExecuted.inc(); - if (currentDesiredBalance.finishReason() == DesiredBalance.ComputationFinishReason.STOP_EARLY) { + final DesiredBalance currentDesiredBalance = currentDesiredBalanceRef.get(); + if (currentDesiredBalance == DesiredBalance.NOT_MASTER || currentDesiredBalance == DesiredBalance.BECOME_MASTER_INITIAL) { + logger.debug( + () -> Strings.format( + "Desired balance computation for [%s] is discarded since master has concurrently changed. " + + "Current desiredBalance=[%s]", + index, + currentDesiredBalance + ) + ); + } else if (currentDesiredBalance.finishReason() == DesiredBalance.ComputationFinishReason.STOP_EARLY) { logger.debug( "Desired balance computation for [{}] terminated early with partial result, scheduling reconciliation", index @@ -164,10 +182,13 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { } private DesiredBalance getInitialDesiredBalance() { + final DesiredBalance currentDesiredBalance = currentDesiredBalanceRef.get(); if (resetCurrentDesiredBalance) { logger.info("Resetting current desired balance"); resetCurrentDesiredBalance = false; - return new DesiredBalance(currentDesiredBalance.lastConvergedIndex(), Map.of()); + return currentDesiredBalance == DesiredBalance.NOT_MASTER + ? DesiredBalance.NOT_MASTER + : new DesiredBalance(currentDesiredBalance.lastConvergedIndex(), Map.of()); } else { return currentDesiredBalance; } @@ -215,6 +236,10 @@ public void allocate(RoutingAllocation allocation, ActionListener listener var index = indexGenerator.incrementAndGet(); logger.debug("Executing allocate for [{}]", index); queue.add(index, listener); + // This can only run on master, so unset not-master if exists + if (currentDesiredBalanceRef.compareAndSet(DesiredBalance.NOT_MASTER, DesiredBalance.BECOME_MASTER_INITIAL)) { + logger.debug("initialized desired balance for becoming master"); + } desiredBalanceComputation.onNewInput(DesiredBalanceInput.create(index, allocation)); if (allocation.routingTable().indicesRouting().isEmpty()) { @@ -224,7 +249,7 @@ public void allocate(RoutingAllocation allocation, ActionListener listener // Starts reconciliation towards desired balance that might have not been updated with a recent calculation yet. 
// This is fine as balance should have incremental rather than radical changes. // This should speed up achieving the desired balance in cases current state is still different from it (due to THROTTLING). - reconcile(currentDesiredBalance, allocation); + reconcile(currentDesiredBalanceRef.get(), allocation); } private void processNodeShutdowns(ClusterState clusterState) { @@ -267,16 +292,26 @@ private static List getMoveCommands(AllocationCommands co } private void setCurrentDesiredBalance(DesiredBalance newDesiredBalance) { - if (logger.isTraceEnabled()) { - var diff = DesiredBalance.hasChanges(currentDesiredBalance, newDesiredBalance) - ? "Diff: " + DesiredBalance.humanReadableDiff(currentDesiredBalance, newDesiredBalance) - : "No changes"; - logger.trace("Desired balance updated: {}. {}", newDesiredBalance, diff); - } else { - logger.debug("Desired balance updated for [{}]", newDesiredBalance.lastConvergedIndex()); + while (true) { + final var oldDesiredBalance = currentDesiredBalanceRef.get(); + if (oldDesiredBalance == DesiredBalance.NOT_MASTER) { + logger.debug("discard desired balance for [{}] since node is no longer master", newDesiredBalance.lastConvergedIndex()); + return; + } + + if (currentDesiredBalanceRef.compareAndSet(oldDesiredBalance, newDesiredBalance)) { + if (logger.isTraceEnabled()) { + var diff = DesiredBalance.hasChanges(oldDesiredBalance, newDesiredBalance) + ? "Diff: " + DesiredBalance.humanReadableDiff(oldDesiredBalance, newDesiredBalance) + : "No changes"; + logger.trace("Desired balance updated: {}. {}", newDesiredBalance, diff); + } else { + logger.debug("Desired balance updated for [{}]", newDesiredBalance.lastConvergedIndex()); + } + computedShardMovements.inc(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)); + break; + } } - computedShardMovements.inc(DesiredBalance.shardMovements(currentDesiredBalance, newDesiredBalance)); - currentDesiredBalance = newDesiredBalance; } protected void submitReconcileTask(DesiredBalance desiredBalance) { @@ -316,7 +351,7 @@ public void execute(RoutingAllocation allocation) { } public DesiredBalance getDesiredBalance() { - return currentDesiredBalance; + return currentDesiredBalanceRef.get(); } public void resetDesiredBalance() { @@ -325,7 +360,7 @@ public void resetDesiredBalance() { public DesiredBalanceStats getStats() { return new DesiredBalanceStats( - Math.max(currentDesiredBalance.lastConvergedIndex(), 0L), + Math.max(currentDesiredBalanceRef.get().lastConvergedIndex(), 0L), desiredBalanceComputation.isActive(), computationsSubmitted.count(), computationsExecuted.count(), @@ -342,7 +377,7 @@ public DesiredBalanceStats getStats() { private void onNoLongerMaster() { if (indexGenerator.getAndSet(-1) != -1) { - currentDesiredBalance = DesiredBalance.INITIAL; + currentDesiredBalanceRef.set(DesiredBalance.NOT_MASTER); queue.completeAllAsNotMaster(); pendingDesiredBalanceMoves.clear(); desiredBalanceReconciler.clear(); @@ -412,7 +447,7 @@ private static void discardSupersededTasks( // only for tests - in production, this happens after reconciliation protected final void completeToLastConvergedIndex() { - queue.complete(currentDesiredBalance.lastConvergedIndex()); + queue.complete(currentDesiredBalanceRef.get().lastConvergedIndex()); } private void recordTime(CounterMetric metric, Runnable action) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceActionTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceActionTests.java index 3dafc8f000f3f..385ac600666db 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceActionTests.java @@ -136,7 +136,7 @@ public DesiredBalance compute( safeAwait((ActionListener listener) -> allocationService.reroute(clusterState, "inital-allocate", listener)); var balanceBeforeReset = allocator.getDesiredBalance(); - assertThat(balanceBeforeReset.lastConvergedIndex(), greaterThan(DesiredBalance.INITIAL.lastConvergedIndex())); + assertThat(balanceBeforeReset.lastConvergedIndex(), greaterThan(DesiredBalance.BECOME_MASTER_INITIAL.lastConvergedIndex())); assertThat(balanceBeforeReset.assignments(), not(anEmptyMap())); var listener = new PlainActionFuture(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index 7b77947792bd4..679d04224aefe 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -96,7 +96,12 @@ public void testComputeBalance() { var clusterState = createInitialClusterState(3); var index = clusterState.metadata().index(TEST_INDEX).getIndex(); - var desiredBalance = desiredBalanceComputer.compute(DesiredBalance.INITIAL, createInput(clusterState), queue(), input -> true); + var desiredBalance = desiredBalanceComputer.compute( + DesiredBalance.BECOME_MASTER_INITIAL, + createInput(clusterState), + queue(), + input -> true + ); assertDesiredAssignments( desiredBalance, @@ -115,7 +120,7 @@ public void testStopsComputingWhenStale() { var index = clusterState.metadata().index(TEST_INDEX).getIndex(); // if the isFresh flag is false then we only do one iteration, allocating the primaries but not the replicas - var desiredBalance0 = DesiredBalance.INITIAL; + var desiredBalance0 = DesiredBalance.BECOME_MASTER_INITIAL; var desiredBalance1 = desiredBalanceComputer.compute(desiredBalance0, createInput(clusterState), queue(), input -> false); assertDesiredAssignments( desiredBalance1, @@ -147,7 +152,7 @@ public void testIgnoresOutOfScopePrimaries() { var primaryShard = mutateAllocationStatus(clusterState.routingTable().index(TEST_INDEX).shard(0).primaryShard()); var desiredBalance = desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, createInput(clusterState, primaryShard), queue(), input -> true @@ -184,7 +189,7 @@ public void testIgnoresOutOfScopeReplicas() { var replicaShard = mutateAllocationStatus(originalReplicaShard); var desiredBalance = desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, createInput(clusterState, replicaShard), queue(), input -> true @@ -241,7 +246,7 @@ public void testAssignShardsToTheirPreviousLocationIfAvailable() { : new ShardRouting[] { clusterState.routingTable().index(TEST_INDEX).shard(0).primaryShard() }; var desiredBalance = desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, createInput(clusterState, ignored), queue(), input -> true @@ -284,7 +289,12 @@ public 
void testRespectsAssignmentOfUnknownPrimaries() { } clusterState = ClusterState.builder(clusterState).routingTable(RoutingTable.of(routingNodes)).build(); - var desiredBalance = desiredBalanceComputer.compute(DesiredBalance.INITIAL, createInput(clusterState), queue(), input -> true); + var desiredBalance = desiredBalanceComputer.compute( + DesiredBalance.BECOME_MASTER_INITIAL, + createInput(clusterState), + queue(), + input -> true + ); assertDesiredAssignments( desiredBalance, @@ -331,7 +341,12 @@ public void testRespectsAssignmentOfUnknownReplicas() { } clusterState = ClusterState.builder(clusterState).routingTable(RoutingTable.of(routingNodes)).build(); - var desiredBalance = desiredBalanceComputer.compute(DesiredBalance.INITIAL, createInput(clusterState), queue(), input -> true); + var desiredBalance = desiredBalanceComputer.compute( + DesiredBalance.BECOME_MASTER_INITIAL, + createInput(clusterState), + queue(), + input -> true + ); assertDesiredAssignments( desiredBalance, @@ -367,7 +382,7 @@ public void testRespectsAssignmentByGatewayAllocators() { } var desiredBalance = desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, DesiredBalanceInput.create(randomNonNegativeLong(), routingAllocation), queue(), input -> true @@ -427,7 +442,12 @@ public ShardAllocationDecision decideShardAllocation(ShardRouting shard, Routing } clusterState = ClusterState.builder(clusterState).routingTable(RoutingTable.of(desiredRoutingNodes)).build(); - var desiredBalance1 = desiredBalanceComputer.compute(DesiredBalance.INITIAL, createInput(clusterState), queue(), input -> true); + var desiredBalance1 = desiredBalanceComputer.compute( + DesiredBalance.BECOME_MASTER_INITIAL, + createInput(clusterState), + queue(), + input -> true + ); assertDesiredAssignments( desiredBalance1, Map.of( @@ -513,7 +533,12 @@ public void testNoDataNodes() { var desiredBalanceComputer = createDesiredBalanceComputer(); var clusterState = createInitialClusterState(0); - var desiredBalance = desiredBalanceComputer.compute(DesiredBalance.INITIAL, createInput(clusterState), queue(), input -> true); + var desiredBalance = desiredBalanceComputer.compute( + DesiredBalance.BECOME_MASTER_INITIAL, + createInput(clusterState), + queue(), + input -> true + ); assertDesiredAssignments(desiredBalance, Map.of()); } @@ -532,7 +557,7 @@ public void testAppliesMoveCommands() { clusterState = ClusterState.builder(clusterState).routingTable(RoutingTable.of(routingNodes)).build(); var desiredBalance = desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, createInput(clusterState), queue( new MoveAllocationCommand(index.getName(), 0, "node-1", "node-2"), @@ -662,7 +687,7 @@ public void testDesiredBalanceShouldConvergeInABigCluster() { var input = new DesiredBalanceInput(randomInt(), routingAllocationWithDecidersOf(clusterState, clusterInfo, settings), List.of()); var desiredBalance = createDesiredBalanceComputer(new BalancedShardsAllocator(settings)).compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, input, queue(), ignored -> iteration.incrementAndGet() < 1000 @@ -1243,7 +1268,7 @@ public ShardAllocationDecision decideShardAllocation(ShardRouting shard, Routing assertThatLogger(() -> { var iteration = new AtomicInteger(0); desiredBalanceComputer.compute( - DesiredBalance.INITIAL, + DesiredBalance.BECOME_MASTER_INITIAL, createInput(createInitialClusterState(3)), queue(), input -> iteration.incrementAndGet() < iterations diff --git 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index 9d33b697e31ca..9caf89d4d7613 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -698,6 +698,7 @@ public void onFailure(Exception e) { try { assertTrue(listenersCalled.await(10, TimeUnit.SECONDS)); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.NOT_MASTER)); } finally { clusterService.close(); terminate(threadPool); @@ -753,7 +754,7 @@ public DesiredBalance compute( try { // initial computation is based on DesiredBalance.INITIAL rerouteAndWait(service, clusterState, "initial-allocation"); - assertThat(desiredBalanceComputer.lastComputationInput.get(), equalTo(DesiredBalance.INITIAL)); + assertThat(desiredBalanceComputer.lastComputationInput.get(), equalTo(DesiredBalance.BECOME_MASTER_INITIAL)); // any next computation is based on current desired balance var current = desiredBalanceShardsAllocator.getDesiredBalance(); @@ -806,7 +807,7 @@ public void testResetDesiredBalanceOnNoLongerMaster() { try { rerouteAndWait(service, clusterState, "initial-allocation"); - assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), not(equalTo(DesiredBalance.INITIAL))); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), not(equalTo(DesiredBalance.BECOME_MASTER_INITIAL))); clusterState = ClusterState.builder(clusterState) .nodes(DiscoveryNodes.builder(clusterState.getNodes()).localNodeId(node1.getId()).masterNodeId(node2.getId())) @@ -816,7 +817,7 @@ public void testResetDesiredBalanceOnNoLongerMaster() { assertThat( "desired balance should be resetted on no longer master", desiredBalanceShardsAllocator.getDesiredBalance(), - equalTo(DesiredBalance.INITIAL) + equalTo(DesiredBalance.NOT_MASTER) ); } finally { clusterService.close(); @@ -862,7 +863,7 @@ public void resetDesiredBalance() { try { rerouteAndWait(service, clusterState, "initial-allocation"); - assertThat(desiredBalanceAllocator.getDesiredBalance(), not(equalTo(DesiredBalance.INITIAL))); + assertThat(desiredBalanceAllocator.getDesiredBalance(), not(equalTo(DesiredBalance.BECOME_MASTER_INITIAL))); final var shutdownType = randomFrom(Type.SIGTERM, Type.REMOVE, Type.REPLACE); final var singleShutdownMetadataBuilder = SingleNodeShutdownMetadata.builder() @@ -938,7 +939,7 @@ public DesiredBalance compute( Queue> pendingDesiredBalanceMoves, Predicate isFresh ) { - assertThat(previousDesiredBalance, sameInstance(DesiredBalance.INITIAL)); + assertThat(previousDesiredBalance, sameInstance(DesiredBalance.BECOME_MASTER_INITIAL)); return new DesiredBalance(desiredBalanceInput.index(), Map.of()); } }, @@ -967,7 +968,7 @@ protected void submitReconcileTask(DesiredBalance desiredBalance) { lastListener.onResponse(null); } }; - assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.INITIAL)); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.NOT_MASTER)); try { final PlainActionFuture future = new PlainActionFuture<>(); desiredBalanceShardsAllocator.allocate( From cab6dc5d56a7fcdbbd2fe355bc6d1277094f1400 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 4 Dec 2024 00:26:23 +1100 Subject: [PATCH 368/386] Mute org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT #117893 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 57db22feba059..cf39eae210f88 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -239,6 +239,8 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} issue: https://github.com/elastic/elasticsearch/issues/117862 +- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/117893 # Examples: # From cca7051e73ff089b26f3d1825e4b4e15b81e04aa Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 3 Dec 2024 14:28:07 +0100 Subject: [PATCH 369/386] ESQL: Simplify CombineProjections (#117882) Make combineUpperGroupingsAndLowerProjections a bit simpler. Also slightly improve a test and add comments to provide more context. --- .../rules/logical/CombineProjections.java | 40 ++++++++++--------- .../optimizer/LogicalPlanOptimizerTests.java | 2 +- 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java index be7096538fb9a..957db4a7273e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineProjections.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import java.util.ArrayList; +import java.util.LinkedHashSet; import java.util.List; public final class CombineProjections extends OptimizerRules.OptimizerRule { @@ -144,30 +145,31 @@ private static List combineUpperGroupingsAndLowerProjections( List upperGroupings, List lowerProjections ) { + assert upperGroupings.size() <= 1 + || upperGroupings.stream().anyMatch(group -> group.anyMatch(expr -> expr instanceof Categorize)) == false + : "CombineProjections only tested with a single CATEGORIZE with no additional groups"; // Collect the alias map for resolving the source (f1 = 1, f2 = f1, etc..) - AttributeMap aliases = new AttributeMap<>(); + AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lowerProjections) { - // record the alias - aliases.put(ne.toAttribute(), Alias.unwrap(ne)); + // Record the aliases. + // Projections are just aliases for attributes, so casting is safe. + aliases.put(ne.toAttribute(), (Attribute) Alias.unwrap(ne)); } - // Replace any matching attribute directly with the aliased attribute from the projection. - AttributeSet seen = new AttributeSet(); - List replaced = new ArrayList<>(); + + // Propagate any renames from the lower projection into the upper groupings. + // This can lead to duplicates: e.g. + // | EVAL x = y | STATS ... BY x, y + // All substitutions happen before; groupings must be attributes at this point except for CATEGORIZE which will be an alias like + // `c = CATEGORIZE(attribute)`. + // Therefore, it is correct to deduplicate based on simple equality (based on names) instead of name ids (Set vs. AttributeSet). 
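+ // Note: the LinkedHashSet used below also preserves the original grouping order while dropping duplicates.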
+ // TODO: The deduplication based on simple equality will be insufficient in case of multiple CATEGORIZEs, e.g. for + // `| EVAL x = y | STATS ... BY CATEGORIZE(x), CATEGORIZE(y)`. That will require semantic equality instead. + LinkedHashSet resolvedGroupings = new LinkedHashSet<>(); for (NamedExpression ne : upperGroupings) { - // Duplicated attributes are ignored. - if (ne instanceof Attribute attribute) { - var newExpression = aliases.resolve(attribute, attribute); - if (newExpression instanceof Attribute newAttribute && seen.add(newAttribute) == false) { - // Already seen, skip - continue; - } - replaced.add(newExpression); - } else { - // For grouping functions, this will replace nested properties too - replaced.add(ne.transformUp(Attribute.class, a -> aliases.resolve(a, a))); - } + NamedExpression transformed = (NamedExpression) ne.transformUp(Attribute.class, a -> aliases.resolve(a, a)); + resolvedGroupings.add(transformed); } - return replaced; + return new ArrayList<>(resolvedGroupings); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 57d0c7432f97b..a74efca3b3d99 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1217,7 +1217,7 @@ public void testCombineProjectionWithCategorizeGrouping() { var plan = plan(""" from test | eval k = first_name, k1 = k - | stats s = sum(salary) by cat = CATEGORIZE(k) + | stats s = sum(salary) by cat = CATEGORIZE(k1) | keep s, cat """); From 03a71d2deee7bb2788fc40b8d21d90cc75b787e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Tue, 3 Dec 2024 14:47:40 +0100 Subject: [PATCH 370/386] ESQL: Make Categorize usable in aggs when identical to a grouping (#117835) Cases like `STATS MV_APPEND(cat, CATEGORIZE(x)) BY cat=CATEGORIZE(x)` should work, as they're moved to an EVAL by a rule. 
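Concretely (a sketch of the rewrite, not its literal output), `STATS a = MV_APPEND(cat, CATEGORIZE(x)) BY cat = CATEGORIZE(x)` keeps only the grouping `cat = CATEGORIZE(x)` in the STATS; the aggregate expression is computed afterwards, with the embedded CATEGORIZE call replaced by a reference to the grouping result, roughly `... | EVAL a = MV_APPEND(cat, cat) | KEEP a, cat`.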
Also, these cases were discarded, as they fail because of other verifications (which also fail for BUCKET):

```
STATS x = category BY category=CATEGORIZE(message)
STATS x = CATEGORIZE(message) BY CATEGORIZE(message)
STATS x = CATEGORIZE(message) BY category=CATEGORIZE(message)
```

--- .../src/main/resources/bucket.csv-spec | 21 +++ .../src/main/resources/categorize.csv-spec | 121 ++++++++++++------ .../src/main/resources/docs.csv-spec | 2 +- .../xpack/esql/action/EsqlCapabilities.java | 2 +- .../xpack/esql/analysis/Verifier.java | 39 +++--- ...ReplaceAggregateAggExpressionWithEval.java | 16 +++ ...laceAggregateNestedExpressionWithEval.java | 6 +- .../xpack/esql/analysis/VerifierTests.java | 34 +++-- .../optimizer/LogicalPlanOptimizerTests.java | 4 +- 9 files changed, 167 insertions(+), 78 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index 7bbf011176693..b29c489910f65 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -503,6 +503,27 @@ FROM employees //end::reuseGroupingFunctionWithExpression-result[] ; +reuseGroupingFunctionImplicitAliasWithExpression#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS s1 = `BUCKET(salary / 100 + 99, 50.)` + 1, s2 = BUCKET(salary / 1000 + 999, 50.) + 2 BY BUCKET(salary / 100 + 99, 50.), b2 = BUCKET(salary / 1000 + 999, 50.) +| SORT `BUCKET(salary / 100 + 99, 50.)`, b2 +| KEEP s1, `BUCKET(salary / 100 + 99, 50.)`, s2, b2 +; + + s1:double | BUCKET(salary / 100 + 99, 50.):double | s2:double | b2:double +351.0 |350.0 |1002.0 |1000.0 +401.0 |400.0 |1002.0 |1000.0 +451.0 |450.0 |1002.0 |1000.0 +501.0 |500.0 |1002.0 |1000.0 +551.0 |550.0 |1002.0 |1000.0 +601.0 |600.0 |1002.0 |1000.0 +601.0 |600.0 |1052.0 |1050.0 +651.0 |650.0 |1052.0 |1050.0 +701.0 |700.0 |1052.0 |1050.0 +751.0 |750.0 |1052.0 |1050.0 +801.0 |800.0 |1052.0 |1050.0 +; + reuseGroupingFunctionWithinAggs#[skip:-8.13.99, reason:BUCKET renamed in 8.14] FROM employees | STATS sum = 1 + MAX(1 + BUCKET(salary, 1000.)) BY BUCKET(salary, 1000.)
+ 1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index e45b10d1aa122..804c1c56a1eb5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -1,5 +1,5 @@ standard aggs -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS count=COUNT(), @@ -17,7 +17,7 @@ count:long | sum:long | avg:double | count_distinct:long | category:keyw ; values aggs -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS values=MV_SORT(VALUES(message)), @@ -33,7 +33,7 @@ values:keyword | top ; mv -required_capability: categorize_v4 +required_capability: categorize_v5 FROM mv_sample_data | STATS COUNT(), SUM(event_duration) BY category=CATEGORIZE(message) @@ -48,7 +48,7 @@ COUNT():long | SUM(event_duration):long | category:keyword ; row mv -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = ["connected to a", "connected to b", "disconnected"], str = ["a", "b", "c"] | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message) @@ -61,7 +61,7 @@ COUNT():long | VALUES(str):keyword | category:keyword ; skips stopwords -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = ["Mon Tue connected to a", "Jul Aug connected to b September ", "UTC connected GMT to c UTC"] | STATS COUNT() BY category=CATEGORIZE(message) @@ -73,7 +73,7 @@ COUNT():long | category:keyword ; with multiple indices -required_capability: categorize_v4 +required_capability: categorize_v5 required_capability: union_types FROM sample_data* @@ -88,7 +88,7 @@ COUNT():long | category:keyword ; mv with many values -required_capability: categorize_v4 +required_capability: categorize_v5 FROM employees | STATS COUNT() BY category=CATEGORIZE(job_positions) @@ -105,7 +105,7 @@ COUNT():long | category:keyword ; mv with many values and SUM -required_capability: categorize_v4 +required_capability: categorize_v5 FROM employees | STATS SUM(languages) BY category=CATEGORIZE(job_positions) @@ -120,7 +120,7 @@ SUM(languages):long | category:keyword ; mv with many values and nulls and SUM -required_capability: categorize_v4 +required_capability: categorize_v5 FROM employees | STATS SUM(languages) BY category=CATEGORIZE(job_positions) @@ -134,7 +134,7 @@ SUM(languages):long | category:keyword ; mv via eval -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL message = MV_APPEND(message, "Banana") @@ -150,7 +150,7 @@ COUNT():long | category:keyword ; mv via eval const -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -164,7 +164,7 @@ COUNT():long | category:keyword ; mv via eval const without aliases -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL message = ["Banana", "Bread"] @@ -178,7 +178,7 @@ COUNT():long | CATEGORIZE(message):keyword ; mv const in parameter -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -191,7 +191,7 @@ COUNT():long | c:keyword ; agg alias shadowing -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS c = COUNT() BY c = CATEGORIZE(["Banana", "Bread"]) @@ -206,7 +206,7 @@ 
c:keyword ; chained aggregations using categorize -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -221,7 +221,7 @@ COUNT():long | category:keyword ; stats without aggs -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS BY category=CATEGORIZE(message) @@ -235,7 +235,7 @@ category:keyword ; text field -required_capability: categorize_v4 +required_capability: categorize_v5 FROM hosts | STATS COUNT() BY category=CATEGORIZE(host_group) @@ -253,7 +253,7 @@ COUNT():long | category:keyword ; on TO_UPPER -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(TO_UPPER(message)) @@ -267,7 +267,7 @@ COUNT():long | category:keyword ; on CONCAT -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " banana")) @@ -281,7 +281,7 @@ COUNT():long | category:keyword ; on CONCAT with unicode -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(CONCAT(message, " 👍🏽😊")) @@ -295,7 +295,7 @@ COUNT():long | category:keyword ; on REVERSE(CONCAT()) -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(REVERSE(CONCAT(message, " 👍🏽😊"))) @@ -309,7 +309,7 @@ COUNT():long | category:keyword ; and then TO_LOWER -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -324,7 +324,7 @@ COUNT():long | category:keyword ; on const empty string -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE("") @@ -336,7 +336,7 @@ COUNT():long | category:keyword ; on const empty string from eval -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL x = "" @@ -349,7 +349,7 @@ COUNT():long | category:keyword ; on null -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL x = null @@ -362,7 +362,7 @@ COUNT():long | SUM(event_duration):long | category:keyword ; on null string -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL x = null::string @@ -375,7 +375,7 @@ COUNT():long | category:keyword ; filtering out all data -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | WHERE @timestamp < "2023-10-23T00:00:00Z" @@ -387,7 +387,7 @@ COUNT():long | category:keyword ; filtering out all data with constant -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS COUNT() BY category=CATEGORIZE(message) @@ -398,7 +398,7 @@ COUNT():long | category:keyword ; drop output columns -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS count=COUNT() BY category=CATEGORIZE(message) @@ -413,7 +413,7 @@ x:integer ; category value processing -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = ["connected to a", "connected to b", "disconnected"] | STATS COUNT() BY category=CATEGORIZE(message) @@ -427,7 +427,7 @@ COUNT():long | category:keyword ; row aliases -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = "connected to xyz" | EVAL x = message @@ -441,7 
+441,7 @@ COUNT():long | category:keyword | y:keyword ; from aliases -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL x = message @@ -457,7 +457,7 @@ COUNT():long | category:keyword | y:keyword ; row aliases with keep -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = "connected to xyz" | EVAL x = message @@ -473,7 +473,7 @@ COUNT():long | y:keyword ; from aliases with keep -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | EVAL x = message @@ -491,7 +491,7 @@ COUNT():long | y:keyword ; row rename -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = "connected to xyz" | RENAME message as x @@ -505,7 +505,7 @@ COUNT():long | y:keyword ; from rename -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | RENAME message as x @@ -521,7 +521,7 @@ COUNT():long | y:keyword ; row drop -required_capability: categorize_v4 +required_capability: categorize_v5 ROW message = "connected to a" | STATS c = COUNT() BY category=CATEGORIZE(message) @@ -534,7 +534,7 @@ c:long ; from drop -required_capability: categorize_v4 +required_capability: categorize_v5 FROM sample_data | STATS c = COUNT() BY category=CATEGORIZE(message) @@ -547,3 +547,48 @@ c:long 3 3 ; + +categorize in aggs inside function +required_capability: categorize_v5 + +FROM sample_data + | STATS COUNT(), x = MV_APPEND(category, category) BY category=CATEGORIZE(message) + | SORT x + | KEEP `COUNT()`, x +; + +COUNT():long | x:keyword + 3 | [.*?Connected.+?to.*?,.*?Connected.+?to.*?] + 3 | [.*?Connection.+?error.*?,.*?Connection.+?error.*?] + 1 | [.*?Disconnected.*?,.*?Disconnected.*?] +; + +categorize in aggs same as grouping inside function +required_capability: categorize_v5 + +FROM sample_data + | STATS COUNT(), x = MV_APPEND(CATEGORIZE(message), `CATEGORIZE(message)`) BY CATEGORIZE(message) + | SORT x + | KEEP `COUNT()`, x +; + +COUNT():long | x:keyword + 3 | [.*?Connected.+?to.*?,.*?Connected.+?to.*?] + 3 | [.*?Connection.+?error.*?,.*?Connection.+?error.*?] + 1 | [.*?Disconnected.*?,.*?Disconnected.*?] +; + +categorize in aggs same as grouping inside function with explicit alias +required_capability: categorize_v5 + +FROM sample_data + | STATS COUNT(), x = MV_APPEND(CATEGORIZE(message), category) BY category=CATEGORIZE(message) + | SORT x + | KEEP `COUNT()`, x +; + +COUNT():long | x:keyword + 3 | [.*?Connected.+?to.*?,.*?Connected.+?to.*?] + 3 | [.*?Connection.+?error.*?,.*?Connection.+?error.*?] + 1 | [.*?Disconnected.*?,.*?Disconnected.*?] 
+; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 24baf1263d06a..aa89c775da4cf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -678,7 +678,7 @@ Bangalore | 9 | 72 ; docsCategorize -required_capability: categorize_v4 +required_capability: categorize_v5 // tag::docsCategorize[] FROM sample_data | STATS count=COUNT() BY category=CATEGORIZE(message) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 646c4f8240c3e..b5d6dd8584e8c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -407,7 +407,7 @@ public enum Cap { /** * Supported the text categorization function "CATEGORIZE". */ - CATEGORIZE_V4, + CATEGORIZE_V5, /** * QSTR function diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 5f8c011cff53a..49d8a5ee8caad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; @@ -63,12 +62,10 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -364,35 +361,35 @@ private static void checkCategorizeGrouping(Aggregate agg, Set failures ); }); - // Forbid CATEGORIZE being used in the aggregations - agg.aggregates().forEach(a -> { - a.forEachDown( - Categorize.class, - categorize -> failures.add( - fail(categorize, "cannot use CATEGORIZE grouping function [{}] within the aggregations", categorize.sourceText()) + // Forbid CATEGORIZE being used in the aggregations, unless it appears as a grouping + agg.aggregates() + .forEach( + a -> a.forEachDown( + AggregateFunction.class, + aggregateFunction -> aggregateFunction.forEachDown( + Categorize.class, + categorize -> failures.add( + fail(categorize, "cannot use CATEGORIZE grouping function [{}] within an aggregation", categorize.sourceText()) + ) + ) ) ); - }); - // Forbid CATEGORIZE being referenced in the aggregation functions - Map categorizeByAliasId = new HashMap<>(); + // Forbid CATEGORIZE being referenced as a child of an aggregation function + AttributeMap categorizeByAttribute = new AttributeMap<>(); agg.groupings().forEach(g -> { g.forEachDown(Alias.class, alias -> { if (alias.child() instanceof Categorize 
categorize) { - categorizeByAliasId.put(alias.id(), categorize); + categorizeByAttribute.put(alias.toAttribute(), categorize); } }); }); agg.aggregates() .forEach(a -> a.forEachDown(AggregateFunction.class, aggregate -> aggregate.forEachDown(Attribute.class, attribute -> { - var categorize = categorizeByAliasId.get(attribute.id()); + var categorize = categorizeByAttribute.get(attribute); if (categorize != null) { failures.add( - fail( - attribute, - "cannot reference CATEGORIZE grouping function [{}] within the aggregations", - attribute.sourceText() - ) + fail(attribute, "cannot reference CATEGORIZE grouping function [{}] within an aggregation", attribute.sourceText()) ); } }))); @@ -449,7 +446,7 @@ private static void checkInvalidNamedExpressionUsage( // check the bucketing function against the group else if (c instanceof GroupingFunction gf) { if (Expressions.anyMatch(groups, ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { - failures.add(fail(gf, "can only use grouping function [{}] part of the BY clause", gf.sourceText())); + failures.add(fail(gf, "can only use grouping function [{}] as part of the BY clause", gf.sourceText())); } } }); @@ -466,7 +463,7 @@ else if (c instanceof GroupingFunction gf) { // optimizer will later unroll expressions with aggs and non-aggs with a grouping function into an EVAL, but that will no longer // be verified (by check above in checkAggregate()), so do it explicitly here if (Expressions.anyMatch(groups, ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { - failures.add(fail(gf, "can only use grouping function [{}] part of the BY clause", gf.sourceText())); + failures.add(fail(gf, "can only use grouping function [{}] as part of the BY clause", gf.sourceText())); } else if (level == 0) { addFailureOnGroupingUsedNakedInAggs(failures, gf, "function"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateAggExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateAggExpressionWithEval.java index 2361b46b2be6f..c36d4caf7f599 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateAggExpressionWithEval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateAggExpressionWithEval.java @@ -9,18 +9,21 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,6 +54,16 @@ protected LogicalPlan rule(Aggregate aggregate) { AttributeMap aliases = new AttributeMap<>(); 
aggregate.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); + // Build Categorize grouping functions map. + // Functions like BUCKET() shouldn't reach this point, + // as they are moved to an early EVAL by ReplaceAggregateNestedExpressionWithEval + Map groupingAttributes = new HashMap<>(); + aggregate.forEachExpressionUp(Alias.class, a -> { + if (a.child() instanceof Categorize groupingFunction) { + groupingAttributes.put(groupingFunction, a.toAttribute()); + } + }); + // break down each aggregate into AggregateFunction and/or grouping key // preserve the projection at the end List aggs = aggregate.aggregates(); @@ -109,6 +122,9 @@ protected LogicalPlan rule(Aggregate aggregate) { return alias.toAttribute(); }); + // replace grouping functions with their references + aggExpression = aggExpression.transformUp(Categorize.class, groupingAttributes::get); + Alias alias = as.replaceChild(aggExpression); newEvals.add(alias); newProjections.add(alias.toAttribute()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java index 985e68252a1f9..4dbc43454a023 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceAggregateNestedExpressionWithEval.java @@ -51,6 +51,7 @@ protected LogicalPlan rule(Aggregate aggregate) { // Exception: Categorize is internal to the aggregation and remains in the groupings. We move its child expression into an eval. if (g instanceof Alias as) { if (as.child() instanceof Categorize cat) { + // For Categorize grouping function, we only move the child expression into an eval if (cat.field() instanceof Attribute == false) { groupingChanged = true; var fieldAs = new Alias(as.source(), as.name(), cat.field(), null, true); @@ -59,7 +60,6 @@ protected LogicalPlan rule(Aggregate aggregate) { evalNames.put(fieldAs.name(), fieldAttr); Categorize replacement = cat.replaceChildren(List.of(fieldAttr)); newGroupings.set(i, as.replaceChild(replacement)); - groupingAttributes.put(cat, fieldAttr); } } else { groupingChanged = true; @@ -135,6 +135,10 @@ protected LogicalPlan rule(Aggregate aggregate) { }); // replace any grouping functions with their references pointing to the added synthetic eval replaced = replaced.transformDown(GroupingFunction.class, gf -> { + // Categorize in aggs depends on the grouping result, not on an early eval + if (gf instanceof Categorize) { + return gf; + } aggsChanged.set(true); // should never return null, as it's verified. // but even if broken, the transform will fail safely; otoh, returning `gf` will fail later due to incorrect plan. 
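An editorial aside on the mechanism these two rules share: an expression tree is rewritten bottom-up, and any occurrence of a grouping function is swapped for a reference to its grouping result. The sketch below illustrates the idea with hypothetical `Expr`, `Ref`, `Categorize`, and `Call` types; they stand in for the real ESQL expression classes, which differ substantially in detail.

```
import java.util.List;
import java.util.Map;
import java.util.function.UnaryOperator;

// Hypothetical stand-ins for the ESQL expression classes; illustration only.
class RewriteSketch {
    sealed interface Expr permits Ref, Categorize, Call {}

    record Ref(String name) implements Expr {}

    record Categorize(Expr field) implements Expr {}

    record Call(String fn, List<Expr> args) implements Expr {}

    // Bottom-up transform: children are rewritten first, then the node itself,
    // mirroring the transformUp(...) calls in the rules above.
    static Expr transformUp(Expr e, UnaryOperator<Expr> f) {
        Expr withChildren = switch (e) {
            case Call c -> new Call(c.fn(), c.args().stream().map(a -> transformUp(a, f)).toList());
            case Categorize c -> new Categorize(transformUp(c.field(), f));
            case Ref r -> r;
        };
        return f.apply(withChildren);
    }

    public static void main(String[] args) {
        // The grouping defines cat = CATEGORIZE(message); record equality makes the map lookup work.
        Map<Expr, Expr> groupingRefs = Map.of(new Categorize(new Ref("message")), new Ref("cat"));
        // An aggregate expression embedding the same CATEGORIZE call.
        Expr agg = new Call("MV_APPEND", List.of(new Ref("cat"), new Categorize(new Ref("message"))));
        // Swap the grouping function for a reference to the grouping result.
        Expr rewritten = transformUp(agg, e -> groupingRefs.getOrDefault(e, e));
        System.out.println(rewritten); // Call[fn=MV_APPEND, args=[Ref[name=cat], Ref[name=cat]]]
    }
}
```

The bottom-up order is what lets the plain `Map` lookup here (and the `AttributeMap` in the real rule) match nested occurrences: children are already in canonical form by the time the parent is compared against a key.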
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index d02e78202e0c2..74e2de1141728 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -407,12 +407,12 @@ public void testAggFilterOnBucketingOrAggFunctions() { // but fails if it's different assertEquals( - "1:32: can only use grouping function [bucket(a, 3)] part of the BY clause", + "1:32: can only use grouping function [bucket(a, 3)] as part of the BY clause", error("row a = 1 | stats sum(a) where bucket(a, 3) > -1 by bucket(a,2)") ); assertEquals( - "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause", + "1:40: can only use grouping function [bucket(salary, 10)] as part of the BY clause", error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by emp_no") ); @@ -444,19 +444,19 @@ public void testAggWithNonBooleanFilter() { public void testGroupingInsideAggsAsAgg() { assertEquals( - "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + "1:18: can only use grouping function [bucket(emp_no, 5.)] as part of the BY clause", error("from test| stats bucket(emp_no, 5.) by emp_no") ); assertEquals( - "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + "1:18: can only use grouping function [bucket(emp_no, 5.)] as part of the BY clause", error("from test| stats bucket(emp_no, 5.)") ); assertEquals( - "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + "1:18: can only use grouping function [bucket(emp_no, 5.)] as part of the BY clause", error("from test| stats bucket(emp_no, 5.) by bucket(emp_no, 6.)") ); assertEquals( - "1:22: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + "1:22: can only use grouping function [bucket(emp_no, 5.)] as part of the BY clause", error("from test| stats 3 + bucket(emp_no, 5.) 
by bucket(emp_no, 6.)") ); } @@ -1846,7 +1846,7 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1875,7 +1875,7 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); @@ -1890,27 +1890,33 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); + query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY cat = CATEGORIZE(first_name)"); + query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY CATEGORIZE(first_name)"); assertEquals( - "1:25: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] within the aggregations", + "1:25: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] within an aggregation", error("FROM test | STATS COUNT(CATEGORIZE(first_name)) BY CATEGORIZE(first_name)") ); - assertEquals( - "1:25: cannot reference CATEGORIZE grouping function [cat] within the aggregations", + "1:25: cannot reference CATEGORIZE grouping function [cat] within an aggregation", error("FROM test | STATS COUNT(cat) BY cat = CATEGORIZE(first_name)") ); assertEquals( - "1:30: cannot reference CATEGORIZE grouping function [cat] within the aggregations", + "1:30: cannot reference CATEGORIZE grouping function [cat] within an aggregation", error("FROM test | STATS SUM(LENGTH(cat::keyword) + LENGTH(last_name)) BY cat = CATEGORIZE(first_name)") ); assertEquals( - "1:25: cannot reference CATEGORIZE grouping function [`CATEGORIZE(first_name)`] within the aggregations", + "1:25: cannot reference CATEGORIZE grouping function [`CATEGORIZE(first_name)`] within an aggregation", error("FROM test | STATS COUNT(`CATEGORIZE(first_name)`) BY CATEGORIZE(first_name)") ); + + assertEquals( + "1:28: can only use grouping function [CATEGORIZE(last_name)] as part of the BY clause", + error("FROM test | STATS MV_COUNT(CATEGORIZE(last_name)) BY CATEGORIZE(first_name)") + ); } public void testSortByAggregate() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index a74efca3b3d99..b76781f76f4af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -1212,7 +1212,7 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] 
*/ public void testCombineProjectionWithCategorizeGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); var plan = plan(""" from test @@ -3949,7 +3949,7 @@ public void testNestedExpressionsInGroups() { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] */ public void testNestedExpressionsInGroupsWithCategorize() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V4.isEnabled()); + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); var plan = optimizedPlan(""" from test From ed1e3664ad6c50d2af24b09db51448072764f663 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 3 Dec 2024 13:51:07 +0000 Subject: [PATCH 371/386] Move SparseVectorQueryBuilder and TextExpansionQueryBuilder to x-pack core (#117857) This commit moves the SparseVectorQueryBuilder and TextExpansionQueryBuilder classes to the x-pack core module, enabling other modules to utilize these query builders. Additionally, it introduces a SparseVectorQueryWrapper to extract sparse vector queries from standard Lucene queries. This is needed to support semantic highlighting with sparse vector fields as a follow-up. --- .../xpack/core/XPackClientPlugin.java | 10 +++ .../ml/search}/SparseVectorQueryBuilder.java | 7 +- .../ml/search/SparseVectorQueryWrapper.java | 77 +++++++++++++++++++ .../ml/search}/TextExpansionQueryBuilder.java | 4 +- .../ml/search/WeightedTokensQueryBuilder.java | 2 +- .../core/ml/search/WeightedTokensUtils.java | 11 ++- .../SparseVectorQueryBuilderTests.java | 21 ++--- .../TextExpansionQueryBuilderTests.java | 14 ++-- .../WeightedTokensQueryBuilderTests.java | 13 +++- .../xpack/ml/MachineLearning.java | 19 ----- 10 files changed, 125 insertions(+), 53 deletions(-) rename x-pack/plugin/{ml/src/main/java/org/elasticsearch/xpack/ml/queries => core/src/main/java/org/elasticsearch/xpack/core/ml/search}/SparseVectorQueryBuilder.java (97%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryWrapper.java rename x-pack/plugin/{ml/src/main/java/org/elasticsearch/xpack/ml/queries => core/src/main/java/org/elasticsearch/xpack/core/ml/search}/TextExpansionQueryBuilder.java (98%) rename x-pack/plugin/{ml/src/test/java/org/elasticsearch/xpack/ml/queries => core/src/test/java/org/elasticsearch/xpack/core/ml/search}/SparseVectorQueryBuilderTests.java (94%) rename x-pack/plugin/{ml/src/test/java/org/elasticsearch/xpack/ml/queries => core/src/test/java/org/elasticsearch/xpack/core/ml/search}/TextExpansionQueryBuilderTests.java (96%) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index e2435c3396fa8..f5923a4942634 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -71,6 +71,8 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; +import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilder; +import org.elasticsearch.xpack.core.ml.search.TextExpansionQueryBuilder;
import org.elasticsearch.xpack.core.ml.search.WeightedTokensQueryBuilder; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; @@ -398,6 +400,14 @@ public List getNamedXContent() { @Override public List> getQueries() { return List.of( + new QuerySpec<>(SparseVectorQueryBuilder.NAME, SparseVectorQueryBuilder::new, SparseVectorQueryBuilder::fromXContent), + new QuerySpec( + TextExpansionQueryBuilder.NAME, + TextExpansionQueryBuilder::new, + TextExpansionQueryBuilder::fromXContent + ), + // TODO: The WeightedTokensBuilder is slated for removal after the SparseVectorQueryBuilder is available. + // The logic to create a Boolean query based on weighted tokens will remain and/or be moved to server. new SearchPlugin.QuerySpec( WeightedTokensQueryBuilder.NAME, WeightedTokensQueryBuilder::new, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilder.java similarity index 97% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilder.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilder.java index 5a63ad8e85e9b..e9e4e90421adc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilder.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.queries; +package org.elasticsearch.xpack.core.ml.search; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -33,9 +33,6 @@ import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; -import org.elasticsearch.xpack.core.ml.search.TokenPruningConfig; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; -import org.elasticsearch.xpack.core.ml.search.WeightedTokensUtils; import java.io.IOException; import java.util.ArrayList; @@ -210,7 +207,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { return (shouldPruneTokens) ? WeightedTokensUtils.queryBuilderWithPrunedTokens(fieldName, tokenPruningConfig, queryVectors, ft, context) - : WeightedTokensUtils.queryBuilderWithAllTokens(queryVectors, ft, context); + : WeightedTokensUtils.queryBuilderWithAllTokens(fieldName, queryVectors, ft, context); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryWrapper.java new file mode 100644 index 0000000000000..234560f620d95 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryWrapper.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.search; + +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Weight; +import org.elasticsearch.index.query.SearchExecutionContext; + +import java.io.IOException; +import java.util.Objects; + +/** + * A wrapper class for the Lucene query generated by {@link SparseVectorQueryBuilder#toQuery(SearchExecutionContext)}. + * This wrapper facilitates the extraction of the complete sparse vector query using a {@link QueryVisitor}. + */ +public class SparseVectorQueryWrapper extends Query { + private final String fieldName; + private final Query termsQuery; + + public SparseVectorQueryWrapper(String fieldName, Query termsQuery) { + this.fieldName = fieldName; + this.termsQuery = termsQuery; + } + + public Query getTermsQuery() { + return termsQuery; + } + + @Override + public Query rewrite(IndexSearcher indexSearcher) throws IOException { + var rewrite = termsQuery.rewrite(indexSearcher); + if (rewrite != termsQuery) { + return new SparseVectorQueryWrapper(fieldName, rewrite); + } + return this; + } + + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return termsQuery.createWeight(searcher, scoreMode, boost); + } + + @Override + public String toString(String field) { + return termsQuery.toString(field); + } + + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(fieldName)) { + termsQuery.visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this)); + } + } + + @Override + public boolean equals(Object obj) { + if (sameClassAs(obj) == false) { + return false; + } + SparseVectorQueryWrapper that = (SparseVectorQueryWrapper) obj; + return fieldName.equals(that.fieldName) && termsQuery.equals(that.termsQuery); + } + + @Override + public int hashCode() { + return Objects.hash(classHash(), fieldName, termsQuery); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilder.java similarity index 98% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilder.java index 6d972bcf5863a..81758ec5f9342 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilder.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml.queries; +package org.elasticsearch.xpack.core.ml.search; import org.apache.lucene.search.Query; import org.apache.lucene.util.SetOnce; @@ -32,8 +32,6 @@ import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; -import org.elasticsearch.xpack.core.ml.search.TokenPruningConfig; -import org.elasticsearch.xpack.core.ml.search.WeightedTokensQueryBuilder; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java index 256c90c3eaa62..f41fcd77ce627 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java @@ -125,7 +125,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { } return (this.tokenPruningConfig == null) - ? WeightedTokensUtils.queryBuilderWithAllTokens(tokens, ft, context) + ? WeightedTokensUtils.queryBuilderWithAllTokens(fieldName, tokens, ft, context) : WeightedTokensUtils.queryBuilderWithPrunedTokens(fieldName, tokenPruningConfig, tokens, ft, context); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensUtils.java index 133920416d227..1c2ac23151e6e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensUtils.java @@ -24,13 +24,18 @@ public final class WeightedTokensUtils { private WeightedTokensUtils() {} - public static Query queryBuilderWithAllTokens(List tokens, MappedFieldType ft, SearchExecutionContext context) { + public static Query queryBuilderWithAllTokens( + String fieldName, + List tokens, + MappedFieldType ft, + SearchExecutionContext context + ) { var qb = new BooleanQuery.Builder(); for (var token : tokens) { qb.add(new BoostQuery(ft.termQuery(token.token(), context), token.weight()), BooleanClause.Occur.SHOULD); } - return qb.setMinimumNumberShouldMatch(1).build(); + return new SparseVectorQueryWrapper(fieldName, qb.setMinimumNumberShouldMatch(1).build()); } public static Query queryBuilderWithPrunedTokens( @@ -64,7 +69,7 @@ public static Query queryBuilderWithPrunedTokens( } } - return qb.setMinimumNumberShouldMatch(1).build(); + return new SparseVectorQueryWrapper(fieldName, qb.setMinimumNumberShouldMatch(1).build()); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilderTests.java similarity index 94% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilderTests.java index 13cf6d87728a8..9872d95de024a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/SparseVectorQueryBuilderTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.queries; +package org.elasticsearch.xpack.core.ml.search; import org.apache.lucene.document.Document; import org.apache.lucene.document.FeatureField; @@ -40,9 +40,6 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.core.ml.search.TokenPruningConfig; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; -import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; import java.lang.reflect.Method; @@ -50,7 +47,7 @@ import java.util.Collection; import java.util.List; -import static org.elasticsearch.xpack.ml.queries.SparseVectorQueryBuilder.QUERY_VECTOR_FIELD; +import static org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilder.QUERY_VECTOR_FIELD; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.hasSize; @@ -102,7 +99,7 @@ private SparseVectorQueryBuilder createTestQueryBuilder(TokenPruningConfig token @Override protected Collection> getPlugins() { - return List.of(MachineLearning.class, MapperExtrasPlugin.class, XPackClientPlugin.class); + return List.of(MapperExtrasPlugin.class, XPackClientPlugin.class); } @Override @@ -156,8 +153,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected void doAssertLuceneQuery(SparseVectorQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(query, instanceOf(SparseVectorQueryWrapper.class)); + var sparseQuery = (SparseVectorQueryWrapper) query; + assertThat(sparseQuery.getTermsQuery(), instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) sparseQuery.getTermsQuery(); assertEquals(booleanQuery.getMinimumNumberShouldMatch(), 1); assertThat(booleanQuery.clauses(), hasSize(NUM_TOKENS)); @@ -233,11 +232,13 @@ public void testToQuery() throws IOException { private void testDoToQuery(SparseVectorQueryBuilder queryBuilder, SearchExecutionContext context) throws IOException { Query query = queryBuilder.doToQuery(context); + assertTrue(query instanceof SparseVectorQueryWrapper); + var sparseQuery = (SparseVectorQueryWrapper) query; if (queryBuilder.shouldPruneTokens()) { // It's possible that all documents were pruned for aggressive pruning configurations - assertTrue(query instanceof BooleanQuery || query instanceof MatchNoDocsQuery); + assertTrue(sparseQuery.getTermsQuery() instanceof BooleanQuery || sparseQuery.getTermsQuery() instanceof MatchNoDocsQuery); } else { - assertTrue(query instanceof BooleanQuery); + assertTrue(sparseQuery.getTermsQuery() instanceof BooleanQuery); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilderTests.java similarity index 96% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilderTests.java index 
00d50e0d0d7bb..a0263003b72db 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/TextExpansionQueryBuilderTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.queries; +package org.elasticsearch.xpack.core.ml.search; import org.apache.lucene.document.Document; import org.apache.lucene.document.FeatureField; @@ -35,10 +35,6 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.core.ml.search.TokenPruningConfig; -import org.elasticsearch.xpack.core.ml.search.WeightedToken; -import org.elasticsearch.xpack.core.ml.search.WeightedTokensQueryBuilder; -import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; import java.lang.reflect.Method; @@ -77,7 +73,7 @@ protected TextExpansionQueryBuilder doCreateTestQueryBuilder() { @Override protected Collection> getPlugins() { - return List.of(MachineLearning.class, MapperExtrasPlugin.class, XPackClientPlugin.class); + return List.of(MapperExtrasPlugin.class, XPackClientPlugin.class); } @Override @@ -129,8 +125,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(query, instanceOf(SparseVectorQueryWrapper.class)); + var sparseQuery = (SparseVectorQueryWrapper) query; + assertThat(sparseQuery.getTermsQuery(), instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) sparseQuery.getTermsQuery(); assertEquals(booleanQuery.getMinimumNumberShouldMatch(), 1); assertThat(booleanQuery.clauses(), hasSize(NUM_TOKENS)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index 114ad90354c61..cded9b8dce5e2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -271,8 +271,11 @@ public void testPruningIsAppliedCorrectly() throws IOException { } private void assertCorrectLuceneQuery(String name, Query query, List expectedFeatureFields) { - assertTrue(query instanceof BooleanQuery); - List booleanClauses = ((BooleanQuery) query).clauses(); + assertThat(query, instanceOf(SparseVectorQueryWrapper.class)); + var sparseQuery = (SparseVectorQueryWrapper) query; + assertThat(sparseQuery.getTermsQuery(), instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) sparseQuery.getTermsQuery(); + List booleanClauses = booleanQuery.clauses(); assertEquals( name + " had " + booleanClauses.size() + " clauses, expected " + expectedFeatureFields.size(), expectedFeatureFields.size(), @@ -343,8 +346,10 @@ public void testMustRewrite() throws IOException { @Override protected void doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { - assertThat(query, instanceOf(BooleanQuery.class)); 
- BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(query, instanceOf(SparseVectorQueryWrapper.class)); + var sparseQuery = (SparseVectorQueryWrapper) query; + assertThat(sparseQuery.getTermsQuery(), instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) sparseQuery.getTermsQuery(); assertEquals(booleanQuery.getMinimumNumberShouldMatch(), 1); assertThat(booleanQuery.clauses(), hasSize(NUM_TOKENS)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 8363e0f5c19a1..c76e43790a259 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -48,7 +48,6 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; @@ -376,8 +375,6 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.process.NativeController; import org.elasticsearch.xpack.ml.process.NativeStorageProvider; -import org.elasticsearch.xpack.ml.queries.SparseVectorQueryBuilder; -import org.elasticsearch.xpack.ml.queries.TextExpansionQueryBuilder; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; import org.elasticsearch.xpack.ml.rest.RestMlInfoAction; import org.elasticsearch.xpack.ml.rest.RestMlMemoryAction; @@ -1764,22 +1761,6 @@ public List> getQueryVectorBuilders() { ); } - @Override - public List> getQueries() { - return List.of( - new QuerySpec( - TextExpansionQueryBuilder.NAME, - TextExpansionQueryBuilder::new, - TextExpansionQueryBuilder::fromXContent - ), - new QuerySpec( - SparseVectorQueryBuilder.NAME, - SparseVectorQueryBuilder::new, - SparseVectorQueryBuilder::fromXContent - ) - ); - } - private ContextParser checkAggLicense(ContextParser realParser, LicensedFeature.Momentary feature) { return (parser, name) -> { if (feature.check(getLicenseState()) == false) { From 5c1b3c7197603414614d72487c7327662d622420 Mon Sep 17 00:00:00 2001 From: mmahacek Date: Tue, 3 Dec 2024 06:10:02 -0800 Subject: [PATCH 372/386] Update email.asciidoc (#117867) Fix error in documentation. --- docs/reference/watcher/actions/email.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/watcher/actions/email.asciidoc b/docs/reference/watcher/actions/email.asciidoc index 16b9cc4be0628..efad500e0226b 100644 --- a/docs/reference/watcher/actions/email.asciidoc +++ b/docs/reference/watcher/actions/email.asciidoc @@ -129,7 +129,7 @@ killed by firewalls or load balancers in-between. | Name | Description | `format` | Attaches the watch data, equivalent to specifying `attach_data` in the watch configuration. Possible values are `json` or `yaml`. - Defaults to `json` if not specified. + Defaults to `yaml` if not specified. 
 |======

From d3f0ae04e2b5e107686b9a19ffbe5312bacec753 Mon Sep 17 00:00:00 2001
From: Craig Taverner
Date: Tue, 3 Dec 2024 15:10:57 +0100
Subject: [PATCH 373/386] Enhance LOOKUP JOIN csv-spec tests to cover more
 cases and fix several bugs found (#117843)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adds several more tests to lookup-join.csv-spec, and fixes the following bugs:

* FieldCaps on the right hand side should ignore the fieldNames method and
  just use "*", because the fieldNames search currently cannot handle lookup
  fields with aliases (should be fixed in a followup PR).
* Stop using the lookup index in the ComputeService (so we don’t get data for
  both indices coming in from the left, and other weird behaviour).
* Ignore failing SearchStats checks on fields from the right hand side in the
  logical planner (so it does not plan EVAL field = null for all right hand
  fields). This should be fixed properly with the correct updates to
  TransportSearchShardsAction (or rather by making multiple uses of it, one
  for each branch of the execution model).
---
 .../xpack/esql/ccq/MultiClusterSpecIT.java    |   4 +-
 .../xpack/esql/CsvTestsDataLoader.java        |   8 +
 .../resources/clientips_lookup-settings.json  |   5 +
 .../src/main/resources/languages.csv          |   2 +-
 .../src/main/resources/lookup-join.csv-spec   | 224 +++++++++++++++++-
 .../src/main/resources/mapping-clientips.json |  16 +-
 .../src/main/resources/mapping-languages.json |   2 +-
 .../main/resources/mapping-message_types.json |  10 +
 .../src/main/resources/message_types.csv      |   6 +
 .../message_types_lookup-settings.json        |   5 +
 .../xpack/esql/action/EsqlCapabilities.java   |   2 +-
 .../esql/enrich/LookupFromIndexService.java   |  11 +
 .../local/ReplaceMissingFieldWithNull.java    |  13 +-
 .../physical/local/InsertFieldExtraction.java |  15 +-
 .../esql/plan/physical/LookupJoinExec.java    |   2 +-
 .../esql/planner/LocalExecutionPlanner.java   |   1 +
 .../xpack/esql/planner/PlannerUtils.java      |  11 +-
 .../xpack/esql/plugin/ComputeService.java     |  54 ++++-
 .../xpack/esql/session/EsqlSession.java       |   4 +-
 .../elasticsearch/xpack/esql/CsvTests.java    |   2 +-
 20 files changed, 355 insertions(+), 42 deletions(-)
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips_lookup-settings.json
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-message_types.json
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types.csv
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types_lookup-settings.json

diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
index af5eadc7358a2..19b29764559d1 100644
--- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
@@ -47,7 +47,7 @@
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources;
 import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS;
 import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2;
-import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V3;
+import static
org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V4; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -125,7 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); - assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V3.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V4.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 9c987a02aca2d..f9d8cf00695c1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -72,6 +72,11 @@ public class CsvTestsDataLoader { .withTypeMapping(Map.of("@timestamp", "date_nanos")); private static final TestsDataset MISSING_IP_SAMPLE_DATA = new TestsDataset("missing_ip_sample_data"); private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips"); + private static final TestsDataset CLIENT_IPS_LOOKUP = CLIENT_IPS.withIndex("clientips_lookup") + .withSetting("clientips_lookup-settings.json"); + private static final TestsDataset MESSAGE_TYPES = new TestsDataset("message_types"); + private static final TestsDataset MESSAGE_TYPES_LOOKUP = MESSAGE_TYPES.withIndex("message_types_lookup") + .withSetting("message_types_lookup-settings.json"); private static final TestsDataset CLIENT_CIDR = new TestsDataset("client_cidr"); private static final TestsDataset AGES = new TestsDataset("ages"); private static final TestsDataset HEIGHTS = new TestsDataset("heights"); @@ -112,6 +117,9 @@ public class CsvTestsDataLoader { Map.entry(SAMPLE_DATA_TS_NANOS.indexName, SAMPLE_DATA_TS_NANOS), Map.entry(MISSING_IP_SAMPLE_DATA.indexName, MISSING_IP_SAMPLE_DATA), Map.entry(CLIENT_IPS.indexName, CLIENT_IPS), + Map.entry(CLIENT_IPS_LOOKUP.indexName, CLIENT_IPS_LOOKUP), + Map.entry(MESSAGE_TYPES.indexName, MESSAGE_TYPES), + Map.entry(MESSAGE_TYPES_LOOKUP.indexName, MESSAGE_TYPES_LOOKUP), Map.entry(CLIENT_CIDR.indexName, CLIENT_CIDR), Map.entry(AGES.indexName, AGES), Map.entry(HEIGHTS.indexName, HEIGHTS), diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips_lookup-settings.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips_lookup-settings.json new file mode 100644 index 0000000000000..b73d1f9accf92 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips_lookup-settings.json @@ -0,0 +1,5 @@ +{ + "index": { + "mode": "lookup" + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages.csv 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages.csv index 3ee60b79970ba..1c1a9776df6cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages.csv +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/languages.csv @@ -1,4 +1,4 @@ -language_code:keyword,language_name:keyword +language_code:integer,language_name:keyword 1,English 2,French 3,Spanish diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 5de353978b307..f2800456ceb33 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -4,8 +4,8 @@ // //TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) -basicOnTheDataNode-Ignore -required_capability: join_lookup_v3 +basicOnTheDataNode +required_capability: join_lookup_v4 FROM employees | EVAL language_code = languages @@ -21,19 +21,19 @@ emp_no:integer | language_code:integer | language_name:keyword 10093 | 3 | Spanish ; -basicRow-Ignore -required_capability: join_lookup_v3 +basicRow +required_capability: join_lookup_v4 ROW language_code = 1 | LOOKUP JOIN languages_lookup ON language_code ; -language_code:keyword | language_name:keyword +language_code:integer | language_name:keyword 1 | English ; basicOnTheCoordinator -required_capability: join_lookup_v3 +required_capability: join_lookup_v4 FROM employees | SORT emp_no @@ -49,9 +49,8 @@ emp_no:integer | language_code:integer | language_name:keyword 10003 | 4 | German ; -//TODO: this sometimes returns null instead of the looked up value (likely related to the execution order) -subsequentEvalOnTheDataNode-Ignore -required_capability: join_lookup_v3 +subsequentEvalOnTheDataNode +required_capability: join_lookup_v4 FROM employees | EVAL language_code = languages @@ -69,7 +68,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x ; subsequentEvalOnTheCoordinator -required_capability: join_lookup_v3 +required_capability: join_lookup_v4 FROM employees | SORT emp_no @@ -85,3 +84,208 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x 10002 | 5 | null | 10 10003 | 4 | german | 8 ; + +lookupIPFromRow +required_capability: join_lookup_v4 + +ROW left = "left", client_ip = "172.21.0.5", right = "right" +| LOOKUP JOIN clientips_lookup ON client_ip +; + +left:keyword | client_ip:keyword | right:keyword | env:keyword +left | 172.21.0.5 | right | Development +; + +lookupIPFromRowWithShadowing +required_capability: join_lookup_v4 + +ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" +| LOOKUP JOIN clientips_lookup ON client_ip +; + +left:keyword | client_ip:keyword | right:keyword | env:keyword +left | 172.21.0.5 | right | Development +; + +lookupIPFromRowWithShadowingKeep +required_capability: join_lookup_v4 + +ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +| KEEP left, client_ip, right, env +; + +left:keyword | client_ip:keyword | right:keyword | env:keyword +left | 172.21.0.5 | right | Development +; + +lookupIPFromIndex +required_capability: join_lookup_v4 + +FROM sample_data +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +; + +@timestamp:date | event_duration:long | message:keyword | 
client_ip:keyword | env:keyword +2023-10-23T13:55:01.543Z | 1756467 | Connected to 10.1.0.1 | 172.21.3.15 | Production +2023-10-23T13:53:55.832Z | 5033755 | Connection error | 172.21.3.15 | Production +2023-10-23T13:52:55.015Z | 8268153 | Connection error | 172.21.3.15 | Production +2023-10-23T13:51:54.732Z | 725448 | Connection error | 172.21.3.15 | Production +2023-10-23T13:33:34.937Z | 1232382 | Disconnected | 172.21.0.5 | Development +2023-10-23T12:27:28.948Z | 2764889 | Connected to 10.1.0.2 | 172.21.2.113 | QA +2023-10-23T12:15:03.360Z | 3450233 | Connected to 10.1.0.3 | 172.21.2.162 | QA +; + +lookupIPFromIndexKeep +required_capability: join_lookup_v4 + +FROM sample_data +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +| KEEP @timestamp, client_ip, event_duration, message, env +; + +@timestamp:date | client_ip:keyword | event_duration:long | message:keyword | env:keyword +2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | Production +2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | Production +2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | Production +2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error | Production +2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected | Development +2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 | QA +2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | QA +; + +lookupIPFromIndexStats +required_capability: join_lookup_v4 + +FROM sample_data +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +| STATS count = count(client_ip) BY env +| SORT count DESC, env ASC +; + +count:long | env:keyword +4 | Production +2 | QA +1 | Development +; + +lookupIPFromIndexStatsKeep +required_capability: join_lookup_v4 + +FROM sample_data +| EVAL client_ip = client_ip::keyword +| LOOKUP JOIN clientips_lookup ON client_ip +| KEEP client_ip, env +| STATS count = count(client_ip) BY env +| SORT count DESC, env ASC +; + +count:long | env:keyword +4 | Production +2 | QA +1 | Development +; + +lookupMessageFromRow +required_capability: join_lookup_v4 + +ROW left = "left", message = "Connected to 10.1.0.1", right = "right" +| LOOKUP JOIN message_types_lookup ON message +; + +left:keyword | message:keyword | right:keyword | type:keyword +left | Connected to 10.1.0.1 | right | Success +; + +lookupMessageFromRowWithShadowing +required_capability: join_lookup_v4 + +ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" +| LOOKUP JOIN message_types_lookup ON message +; + +left:keyword | message:keyword | right:keyword | type:keyword +left | Connected to 10.1.0.1 | right | Success +; + +lookupMessageFromRowWithShadowingKeep +required_capability: join_lookup_v4 + +ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right" +| LOOKUP JOIN message_types_lookup ON message +| KEEP left, message, right, type +; + +left:keyword | message:keyword | right:keyword | type:keyword +left | Connected to 10.1.0.1 | right | Success +; + +lookupMessageFromIndex +required_capability: join_lookup_v4 + +FROM sample_data +| LOOKUP JOIN message_types_lookup ON message +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | type:keyword +2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | Success +2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | Error 
+2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | Error +2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error | Error +2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected | Disconnected +2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 | Success +2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | Success +; + +lookupMessageFromIndexKeep +required_capability: join_lookup_v4 + +FROM sample_data +| LOOKUP JOIN message_types_lookup ON message +| KEEP @timestamp, client_ip, event_duration, message, type +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | type:keyword +2023-10-23T13:55:01.543Z | 172.21.3.15 | 1756467 | Connected to 10.1.0.1 | Success +2023-10-23T13:53:55.832Z | 172.21.3.15 | 5033755 | Connection error | Error +2023-10-23T13:52:55.015Z | 172.21.3.15 | 8268153 | Connection error | Error +2023-10-23T13:51:54.732Z | 172.21.3.15 | 725448 | Connection error | Error +2023-10-23T13:33:34.937Z | 172.21.0.5 | 1232382 | Disconnected | Disconnected +2023-10-23T12:27:28.948Z | 172.21.2.113 | 2764889 | Connected to 10.1.0.2 | Success +2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 | Connected to 10.1.0.3 | Success +; + +lookupMessageFromIndexStats +required_capability: join_lookup_v4 + +FROM sample_data +| LOOKUP JOIN message_types_lookup ON message +| STATS count = count(message) BY type +| SORT count DESC, type ASC +; + +count:long | type:keyword +3 | Error +3 | Success +1 | Disconnected +; + +lookupMessageFromIndexStatsKeep +required_capability: join_lookup_v4 + +FROM sample_data +| LOOKUP JOIN message_types_lookup ON message +| KEEP message, type +| STATS count = count(message) BY type +| SORT count DESC, type ASC +; + +count:long | type:keyword +3 | Error +3 | Success +1 | Disconnected +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json index 39bd37ce26c7f..d491810f9134e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json @@ -1,10 +1,10 @@ { - "properties": { - "client_ip": { - "type": "keyword" - }, - "env": { - "type": "keyword" - } + "properties": { + "client_ip": { + "type": "keyword" + }, + "env": { + "type": "keyword" } - } \ No newline at end of file + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-languages.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-languages.json index 0cec0caf17304..327b692369242 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-languages.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-languages.json @@ -1,7 +1,7 @@ { "properties" : { "language_code" : { - "type" : "keyword" + "type" : "integer" }, "language_name" : { "type" : "keyword" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-message_types.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-message_types.json new file mode 100644 index 0000000000000..af545b48da3d2 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-message_types.json @@ -0,0 +1,10 @@ +{ + "properties": { + "message": { + "type": "keyword" + }, + "type": { + "type": "keyword" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types.csv 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types.csv new file mode 100644 index 0000000000000..8e00485771445 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types.csv @@ -0,0 +1,6 @@ +message:keyword,type:keyword +Connection error,Error +Disconnected,Disconnected +Connected to 10.1.0.1,Success +Connected to 10.1.0.2,Success +Connected to 10.1.0.3,Success diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types_lookup-settings.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types_lookup-settings.json new file mode 100644 index 0000000000000..b73d1f9accf92 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/message_types_lookup-settings.json @@ -0,0 +1,5 @@ +{ + "index": { + "mode": "lookup" + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index b5d6dd8584e8c..4845c7061949b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -525,7 +525,7 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP_V3(Build.current().isSnapshot()), + JOIN_LOOKUP_V4(Build.current().isSnapshot()), /** * Fix for https://github.com/elastic/elasticsearch/issues/117054 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 849e8e890e248..4f429c46b9123 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -24,6 +24,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -78,9 +79,19 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque @Override protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); + validateTypes(request.inputDataType, fieldType); return termQueryList(fieldType, context, inputBlock, inputDataType); } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { + // TODO: consider supporting implicit type conversion as done in ENRICH for some types + if (fieldType.typeName().equals(inputDataType.typeName()) == false) { + throw new EsqlIllegalArgumentException( + "LOOKUP JOIN match and input types are incompatible: match[" + fieldType.typeName() + "], input[" + inputDataType + "]" + ); + } + } + public static class Request extends AbstractLookupService.Request { private final String matchField; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java index 0fa6d61a0ca9b..096f72f7694e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/ReplaceMissingFieldWithNull.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -23,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -56,10 +58,13 @@ else if (plan instanceof Project project) { var projections = project.projections(); List newProjections = new ArrayList<>(projections.size()); Map nullLiteral = Maps.newLinkedHashMapWithExpectedSize(DataType.types().size()); + AttributeSet joinAttributes = joinAttributes(project); for (NamedExpression projection : projections) { // Do not use the attribute name, this can deviate from the field name for union types. - if (projection instanceof FieldAttribute f && stats.exists(f.fieldName()) == false) { + if (projection instanceof FieldAttribute f && stats.exists(f.fieldName()) == false && joinAttributes.contains(f) == false) { + // TODO: Should do a searchStats lookup for join attributes instead of just ignoring them here + // See TransportSearchShardsAction DataType dt = f.dataType(); Alias nullAlias = nullLiteral.get(f.dataType()); // save the first field as null (per datatype) @@ -96,4 +101,10 @@ else if (plan instanceof Project project) { return plan; } + + private AttributeSet joinAttributes(Project project) { + var attributes = new AttributeSet(); + project.forEachDown(Join.class, j -> j.right().forEachDown(EsRelation.class, p -> attributes.addAll(p.output()))); + return attributes; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java index cafe3726f92ac..dc32a4ad3c282 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java @@ -23,14 +23,12 @@ import org.elasticsearch.xpack.esql.rule.Rule; import java.util.ArrayList; -import java.util.Collections; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** - * * Materialize the concrete fields that need to be extracted from the storage until the last possible moment. * Expects the local plan to already have a projection containing the fields needed upstream. *

    @@ -102,15 +100,18 @@ public PhysicalPlan apply(PhysicalPlan plan) { private static Set missingAttributes(PhysicalPlan p) { var missing = new LinkedHashSet(); - var inputSet = p.inputSet(); + var input = p.inputSet(); - // TODO: We need to extract whatever fields are missing from the left hand side. - // skip the lookup join since the right side is always materialized and a projection + // For LOOKUP JOIN we only need field-extraction on left fields used to match, since the right side is always materialized if (p instanceof LookupJoinExec join) { - return Collections.emptySet(); + join.leftFields().forEach(f -> { + if (input.contains(f) == false) { + missing.add(f); + } + }); + return missing; } - var input = inputSet; // collect field attributes used inside expressions // TODO: Rather than going over all expressions manually, this should just call .references() p.forEachExpression(TypedAttribute.class, f -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java index 2d3caa27da4cd..8b1cc047309e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -102,7 +102,7 @@ public List output() { @Override public PhysicalPlan estimateRowSize(State state) { - state.add(false, output()); + state.add(false, addedFields); return this; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index a8afaa4d8119b..8c0488afdd42a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -565,6 +565,7 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(join.left(), context); + // TODO: The source builder includes incoming fields including the ones we're going to drop Layout.Builder layoutBuilder = source.layout.builder(); for (Attribute f : join.addedFields()) { layoutBuilder.append(f); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index f4ada1442efe5..37f89891860d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.Queries; +import org.elasticsearch.xpack.esql.index.EsIndex; import 
org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; @@ -117,12 +119,17 @@ public static String[] planOriginalIndices(PhysicalPlan plan) { var indices = new LinkedHashSet(); plan.forEachUp( FragmentExec.class, - f -> f.fragment() - .forEachUp(EsRelation.class, r -> indices.addAll(asList(Strings.commaDelimitedListToStringArray(r.index().name())))) + f -> f.fragment().forEachUp(EsRelation.class, r -> addOriginalIndexIfNotLookup(indices, r.index())) ); return indices.toArray(String[]::new); } + private static void addOriginalIndexIfNotLookup(Set indices, EsIndex index) { + if (index.indexNameWithModes().get(index.name()) != IndexMode.LOOKUP) { + indices.addAll(asList(Strings.commaDelimitedListToStringArray(index.name()))); + } + } + public static PhysicalPlan localPlan(List searchContexts, Configuration configuration, PhysicalPlan plan) { return localPlan(configuration, plan, SearchContextStats.from(searchContexts)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 9aea1577a4137..c9c8635a60f57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -62,8 +62,12 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; @@ -76,6 +80,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -160,9 +165,11 @@ public void execute( Map clusterToConcreteIndices = transportService.getRemoteClusterService() .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planConcreteIndices(physicalPlan).toArray(String[]::new)); QueryPragmas queryPragmas = configuration.pragmas(); + Set lookupIndexNames = findLookupIndexNames(physicalPlan); + Set concreteIndexNames = selectConcreteIndices(clusterToConcreteIndices, lookupIndexNames); if (dataNodePlan == null) { - if (clusterToConcreteIndices.values().stream().allMatch(v -> v.indices().length == 0) == false) { - String error = "expected no concrete indices without data node plan; got " + clusterToConcreteIndices; + if (concreteIndexNames.isEmpty() == false) { + String error = "expected no concrete indices without data node plan; got " + concreteIndexNames; assert false : error; listener.onFailure(new IllegalStateException(error)); return; @@ -185,7 +192,7 @@ public void execute( return; } } else { - if (clusterToConcreteIndices.values().stream().allMatch(v -> 
v.indices().length == 0)) { + if (concreteIndexNames.isEmpty()) { var error = "expected concrete indices with data node plan but got empty; data node plan " + dataNodePlan; assert false : error; listener.onFailure(new IllegalStateException(error)); @@ -259,6 +266,42 @@ public void execute( } } + private Set selectConcreteIndices(Map clusterToConcreteIndices, Set indexesToIgnore) { + Set concreteIndexNames = new HashSet<>(); + clusterToConcreteIndices.forEach((clusterAlias, concreteIndices) -> { + for (String index : concreteIndices.indices()) { + if (indexesToIgnore.contains(index) == false) { + concreteIndexNames.add(index); + } + } + }); + return concreteIndexNames; + } + + private Set findLookupIndexNames(PhysicalPlan physicalPlan) { + Set lookupIndexNames = new HashSet<>(); + // When planning JOIN on the coordinator node: "LookupJoinExec.lookup()->FragmentExec.fragment()->EsRelation.index()" + physicalPlan.forEachDown( + LookupJoinExec.class, + lookupJoinExec -> lookupJoinExec.lookup() + .forEachDown( + FragmentExec.class, + frag -> frag.fragment().forEachDown(EsRelation.class, esRelation -> lookupIndexNames.add(esRelation.index().name())) + ) + ); + // When planning JOIN on the data node: "FragmentExec.fragment()->Join.right()->EsRelation.index()" + // TODO this only works for LEFT join, so we still need to support RIGHT join + physicalPlan.forEachDown( + FragmentExec.class, + fragmentExec -> fragmentExec.fragment() + .forEachDown( + Join.class, + join -> join.right().forEachDown(EsRelation.class, esRelation -> lookupIndexNames.add(esRelation.index().name())) + ) + ); + return lookupIndexNames; + } + // For queries like: FROM logs* | LIMIT 0 (including cross-cluster LIMIT 0 queries) private static void updateShardCountForCoordinatorOnlyQuery(EsqlExecutionInfo execInfo) { if (execInfo.isCrossClusterSearch()) { @@ -562,8 +605,9 @@ record DataNode(Transport.Connection connection, List shardIds, Map dataNodes, int totalShards, int skippedShards) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 3b0f9ab578df9..3d1ed8f70eae0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -178,7 +178,7 @@ public void executeOptimizedPlan( executeSubPlans(physicalPlan, planRunner, executionInfo, request, listener); } - private record PlanTuple(PhysicalPlan physical, LogicalPlan logical) {}; + private record PlanTuple(PhysicalPlan physical, LogicalPlan logical) {} private void executeSubPlans( PhysicalPlan physicalPlan, @@ -313,7 +313,7 @@ private void preAnalyze( // First resolve the lookup indices, then the main indices preAnalyzeLookupIndices( preAnalysis.lookupIndices, - fieldNames, + Set.of("*"), // Current LOOKUP JOIN syntax does not allow for field selection l.delegateFailureAndWrap( (lx, lookupIndexResolution) -> preAnalyzeIndices( indices, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index df974a88a4c57..2e8b856cf82a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -263,7 +263,7 @@ public final void test() throws Throwable { ); assumeFalse( "lookup join disabled for 
csv tests",
-            testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V3.capabilityName())
+            testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V4.capabilityName())
         );
         if (Build.current().isSnapshot()) {
             assertThat(

From ae1b1320996f8fb636f8f377bc9fa7b7743230a6 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 3 Dec 2024 06:34:13 -0800
Subject: [PATCH 374/386] Only check non-negative stats for active, current and
 queue (#117834)

In SimpleThreadPoolIT, stats are gathered for each threadpool
being checked, then measurements are collected. Some stats may
go up or down depending on other background tasks outside the
test. This commit adjusts the check for those stats to only verify
that the collected values are non-negative.

closes #108320
---
 muted-tests.yml                                           | 3 ---
 .../org/elasticsearch/threadpool/SimpleThreadPoolIT.java  | 6 +++---
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index cf39eae210f88..3652173327e84 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -141,9 +141,6 @@ tests:
 - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT
   method: testAllocationPreventedForRemoval
   issue: https://github.com/elastic/elasticsearch/issues/116363
-- class: org.elasticsearch.threadpool.SimpleThreadPoolIT
-  method: testThreadPoolMetrics
-  issue: https://github.com/elastic/elasticsearch/issues/108320
 - class: org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT
   method: testILMDownsampleRollingRestart
   issue: https://github.com/elastic/elasticsearch/issues/114233
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
index be875421e036f..d2e021a8d7436 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
@@ -167,10 +167,10 @@ public void testThreadPoolMetrics() throws Exception {
         tps[0].forEach(stats -> {
             Map<String, Long> threadPoolStats = List.of(
                 Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, stats.completed()),
-                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, (long) stats.active()),
-                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, (long) stats.threads()),
+                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, 0L),
+                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, 0L),
                 Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, (long) stats.largest()),
-                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, (long) stats.queue())
+                Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, 0L)
             ).stream().collect(toUnmodifiableSortedMap(e -> stats.name() + e.getKey(), Entry::getValue));

             Function<String, List<Long>> measurementExtractor = name -> {

From 5ed106a79b278052842865d2e63c4817230af7ab Mon Sep 17 00:00:00 2001
From: Artem Prigoda
Date: Tue, 3 Dec 2024 16:16:03 +0100
Subject: [PATCH 375/386] [test] Remove synchronization from
 InternalTestCluster#getInstance (#117780)

The map of nodes is volatile and immutable and can be read without
synchronization. Getting a class's instance from the node's injector is
also thread safe.

Doing so prevents deadlocks if we restart the node and have a
disruption scheme that internally calls `getInstance` from another
thread.
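Why dropping the monitor is safe: the nodes map is published through a `volatile` field and is itself immutable, so the volatile write happens-before every subsequent volatile read, and readers always observe a fully constructed snapshot. Below is a minimal sketch of that pattern, using a hypothetical `NodeRegistry` class rather than the real `InternalTestCluster` internals:

```java
import java.util.Map;
import java.util.function.Predicate;

// Sketch of the lock-free read pattern this commit relies on (hypothetical
// class, not the actual InternalTestCluster code).
class NodeRegistry<T> {
    // Volatile reference to an immutable map: readers take no lock.
    private volatile Map<String, T> nodes = Map.of();

    // Safe from any thread: one volatile read, then reads of an
    // immutable structure.
    T find(Predicate<T> predicate) {
        return nodes.values().stream().filter(predicate).findFirst().orElse(null);
    }

    // Only writers synchronize, and they swap in a whole new snapshot
    // instead of mutating the published map.
    synchronized void publish(Map<String, T> updated) {
        nodes = Map.copyOf(updated);
    }
}
```

The thread dump below shows the lock cycle that the synchronized reader made possible: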
```
2> "elasticsearch[StatelessClusterIntegrityStressIT][server][T#1]" ID=3490 BLOCKED on org.elasticsearch.test.InternalTestCluster@18a6d098 owned by "elasticsearch[StatelessClusterIntegrityStressIT][server][T#2]" ID=3492
2>   at app//org.elasticsearch.test.InternalTestCluster.getInstance(InternalTestCluster.java:1653)
2>   - blocked on org.elasticsearch.test.InternalTestCluster@18a6d098
2>   at app//org.elasticsearch.test.InternalTestCluster.getInstance(InternalTestCluster.java:1620)
2>   at app//org.elasticsearch.test.disruption.NetworkDisruption.transport(NetworkDisruption.java:172)
2>   at app//org.elasticsearch.test.disruption.NetworkDisruption.applyToNodes(NetworkDisruption.java:157)
2>   at app//org.elasticsearch.test.disruption.NetworkDisruption.startDisrupting(NetworkDisruption.java:133)

2> "elasticsearch[StatelessClusterIntegrityStressIT][server][T#2]" ID=3492 BLOCKED on org.elasticsearch.test.disruption.NetworkDisruption@60fd3a1e owned by "elasticsearch[StatelessClusterIntegrityStressIT][server][T#1]" ID=3490
2>   at app//org.elasticsearch.test.disruption.NetworkDisruption.applyToNode(NetworkDisruption.java:116)
2>   - blocked on org.elasticsearch.test.disruption.NetworkDisruption@60fd3a1e
2>   at app//org.elasticsearch.test.InternalTestCluster.applyDisruptionSchemeToNode(InternalTestCluster.java:2307)
2>   at app//org.elasticsearch.test.InternalTestCluster.publishNode(InternalTestCluster.java:2258)
2>   - locked org.elasticsearch.test.InternalTestCluster@18a6d098
2>   at app//org.elasticsearch.test.InternalTestCluster.restartNode(InternalTestCluster.java:1901)
2>   at app//org.elasticsearch.test.InternalTestCluster.restartNode(InternalTestCluster.java:1863)
2>   - locked org.elasticsearch.test.InternalTestCluster@18a6d098
```
---
 .../main/java/org/elasticsearch/test/InternalTestCluster.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 7a04384298933..6d46605e201f9 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -1649,7 +1649,7 @@ public <T> T getAnyMasterNodeInstance(Class<T> clazz) {
         return getInstance(clazz, MASTER_NODE_PREDICATE);
     }

-    private synchronized <T> T getInstance(Class<T> clazz, Predicate<NodeAndClient> predicate) {
+    private <T> T getInstance(Class<T> clazz, Predicate<NodeAndClient> predicate) {
         NodeAndClient randomNodeAndClient = getRandomNodeAndClient(predicate);
         if (randomNodeAndClient == null) {
             throw new AssertionError("no node matches [" + predicate + "]");

From 267dc1a41d49b11c6470ae1f83091debfc49e95f Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Tue, 3 Dec 2024 07:27:44 -0800
Subject: [PATCH 376/386] Fix BWC for ES|QL cluster request (#117865)

We identified a BWC bug in the cluster compute request. Specifically,
the indices options were not properly selected for requests from an
older querying cluster. This caused the search_shards API on the remote
cluster to use restricted indices options, leading to failures when
resolving wildcard index patterns.

Our tests didn't catch this issue because the current BWC tests for
cross-cluster queries only cover one direction: the querying cluster on
the current version and the remote cluster on a compatible version.
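The shape of the read-side fix is version-gated deserialization: if the querying cluster is new enough, read the `OriginalIndices` (options included) from the wire; otherwise synthesize indices options on the remote, picking the permissive search defaults rather than a restricted set. A hedged sketch of that logic follows (the exact change lands in `RemoteClusterPlan` further down in this patch; `ESQL_ORIGINAL_INDICES` stands in for whichever transport version guards the field):

```java
// Sketch with assumed names: how the remote side could reconstruct
// OriginalIndices when an older querying cluster did not send indices options.
static RemoteClusterPlan readFrom(PlanStreamInput in) throws IOException {
    PhysicalPlan plan = in.readNamedWriteable(PhysicalPlan.class);
    String[] targetIndices = in.readStringArray();
    final OriginalIndices originalIndices;
    if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) {
        // Newer querying clusters serialize the original indices, options included.
        originalIndices = OriginalIndices.readOriginalIndices(in);
    } else {
        // Older querying clusters send only the index names. Fall back to the
        // default search indices options so that search_shards on the remote
        // can still expand wildcard patterns; restricted options reject them.
        originalIndices = new OriginalIndices(in.readStringArray(), SearchRequest.DEFAULT_INDICES_OPTIONS);
    }
    return new RemoteClusterPlan(plan, targetIndices, originalIndices);
}
```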
This PR fixes the issue and expands BWC tests to support both directions: the querying cluster on the current version with the remote cluster on a compatible version, and vice versa. --- docs/changelog/117865.yaml | 5 + .../qa/server/multi-clusters/build.gradle | 17 +- .../xpack/esql/ccq/Clusters.java | 19 +- .../xpack/esql/ccq/EsqlRestValidationIT.java | 7 + .../xpack/esql/ccq/MultiClusterSpecIT.java | 7 +- .../xpack/esql/ccq/MultiClustersIT.java | 104 ++++++--- .../xpack/esql/qa/single_node/RestEsqlIT.java | 1 - .../xpack/esql/plugin/RemoteClusterPlan.java | 21 +- .../esql/plugin/ClusterRequestTests.java | 206 ++++++++++++++++++ 9 files changed, 345 insertions(+), 42 deletions(-) create mode 100644 docs/changelog/117865.yaml create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java diff --git a/docs/changelog/117865.yaml b/docs/changelog/117865.yaml new file mode 100644 index 0000000000000..33dc497725f92 --- /dev/null +++ b/docs/changelog/117865.yaml @@ -0,0 +1,5 @@ +pr: 117865 +summary: Fix BWC for ES|QL cluster request +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle index 7f3859e2229ef..d80cb764ca433 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle @@ -23,9 +23,22 @@ def supportedVersion = bwcVersion -> { } buildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> - tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { + tasks.register("${baseName}#newToOld", StandaloneRestIntegTestTask) { + usesBwcDistribution(bwcVersion) + systemProperty("tests.version.remote_cluster", bwcVersion) + maxParallelForks = 1 + } + + tasks.register("${baseName}#oldToNew", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) - systemProperty("tests.old_cluster_version", bwcVersion) + systemProperty("tests.version.local_cluster", bwcVersion) + maxParallelForks = 1 + } + + // TODO: avoid running tests twice with the current version + tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { + dependsOn tasks.named("${baseName}#oldToNew") + dependsOn tasks.named("${baseName}#newToOld") maxParallelForks = 1 } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java index fa8cb49c59aed..5f3f135810322 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java @@ -20,7 +20,7 @@ public static ElasticsearchCluster remoteCluster() { return ElasticsearchCluster.local() .name(REMOTE_CLUSTER_NAME) .distribution(DistributionType.DEFAULT) - .version(Version.fromString(System.getProperty("tests.old_cluster_version"))) + .version(distributionVersion("tests.version.remote_cluster")) .nodes(2) .setting("node.roles", "[data,ingest,master]") .setting("xpack.security.enabled", "false") @@ -34,7 +34,7 @@ public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteClust return ElasticsearchCluster.local() .name(LOCAL_CLUSTER_NAME) .distribution(DistributionType.DEFAULT) - .version(Version.CURRENT) + 
.version(distributionVersion("tests.version.local_cluster")) .nodes(2) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") @@ -46,7 +46,18 @@ public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteClust .build(); } - public static org.elasticsearch.Version oldVersion() { - return org.elasticsearch.Version.fromString(System.getProperty("tests.old_cluster_version")); + public static org.elasticsearch.Version localClusterVersion() { + String prop = System.getProperty("tests.version.local_cluster"); + return prop != null ? org.elasticsearch.Version.fromString(prop) : org.elasticsearch.Version.CURRENT; + } + + public static org.elasticsearch.Version remoteClusterVersion() { + String prop = System.getProperty("tests.version.remote_cluster"); + return prop != null ? org.elasticsearch.Version.fromString(prop) : org.elasticsearch.Version.CURRENT; + } + + private static Version distributionVersion(String key) { + final String val = System.getProperty(key); + return val != null ? Version.fromString(val) : Version.CURRENT; } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationIT.java index 21307c5362417..55500aa1c9537 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/EsqlRestValidationIT.java @@ -10,12 +10,14 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.http.HttpHost; +import org.elasticsearch.Version; import org.elasticsearch.client.RestClient; import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.EsqlRestValidationTestCase; import org.junit.AfterClass; +import org.junit.Before; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -78,4 +80,9 @@ private RestClient remoteClusterClient() throws IOException { } return remoteClient; } + + @Before + public void skipTestOnOldVersions() { + assumeTrue("skip on old versions", Clusters.localClusterVersion().equals(Version.V_8_16_0)); + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 19b29764559d1..e658d169cbce8 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -12,6 +12,7 @@ import org.apache.http.HttpEntity; import org.apache.http.HttpHost; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; @@ -118,10 +119,8 @@ protected void shouldSkipTest(String testName) throws IOException { // Do not run tests including "METADATA _index" unless marked with metadata_fields_remote_test, // because they may produce inconsistent results with 
multiple clusters. assumeFalse("can't test with _index metadata", (remoteMetadata == false) && hasIndexMetadata(testCase.query)); - assumeTrue( - "Test " + testName + " is skipped on " + Clusters.oldVersion(), - isEnabled(testName, instructions, Clusters.oldVersion()) - ); + Version oldVersion = Version.min(Clusters.localClusterVersion(), Clusters.remoteClusterVersion()); + assumeTrue("Test " + testName + " is skipped on " + oldVersion, isEnabled(testName, instructions, oldVersion)); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index dbeaed1596eff..452f40baa34a8 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.http.HttpHost; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; @@ -29,7 +30,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -127,10 +127,12 @@ void indexDocs(RestClient client, String index, List docs) throws IOExcepti } private Map run(String query, boolean includeCCSMetadata) throws IOException { - Map resp = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query(query).includeCCSMetadata(includeCCSMetadata).build() - ); - logger.info("--> query {} response {}", query, resp); + var queryBuilder = new RestEsqlTestCase.RequestObjectBuilder().query(query); + if (includeCCSMetadata) { + queryBuilder.includeCCSMetadata(true); + } + Map resp = runEsql(queryBuilder.build()); + logger.info("--> query {} response {}", queryBuilder, resp); return resp; } @@ -156,7 +158,7 @@ private Map runEsql(RestEsqlTestCase.RequestObjectBuilder reques public void testCount() throws Exception { { - boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run("FROM test-local-index,*:test-remote-index | STATS c = COUNT(*)", includeCCSMetadata); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); @@ -165,13 +167,16 @@ public void testCount() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, false); } } { - 
boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run("FROM *:test-remote-index | STATS c = COUNT(*)", includeCCSMetadata); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); @@ -180,7 +185,10 @@ public void testCount() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, true); } @@ -189,7 +197,7 @@ public void testCount() throws Exception { public void testUngroupedAggs() throws Exception { { - boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data)", includeCCSMetadata); var columns = List.of(Map.of("name", "total", "type", "long")); long sum = Stream.concat(localDocs.stream(), remoteDocs.stream()).mapToLong(d -> d.data).sum(); @@ -200,13 +208,16 @@ public void testUngroupedAggs() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, false); } } { - boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run("FROM *:test-remote-index | STATS total = SUM(data)", includeCCSMetadata); var columns = List.of(Map.of("name", "total", "type", "long")); long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); @@ -216,12 +227,16 @@ public void testUngroupedAggs() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, true); } } { + assumeTrue("requires ccs metadata", ccsMetadataAvailable()); Map result = runWithColumnarAndIncludeCCSMetadata("FROM *:test-remote-index | STATS total = SUM(data)"); var columns = List.of(Map.of("name", "total", "type", "long")); long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); @@ -293,7 +308,7 @@ private void assertClusterDetailsMap(Map result, boolean remoteO public void testGroupedAggs() throws Exception { { - boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run( "FROM test-local-index,*:test-remote-index | STATS total = SUM(data) BY color | SORT color", includeCCSMetadata @@ -311,13 +326,16 @@ public void testGroupedAggs() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, 
mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, false); } } { - boolean includeCCSMetadata = randomBoolean(); + boolean includeCCSMetadata = includeCCSMetadata(); Map result = run( "FROM *:test-remote-index | STATS total = SUM(data) by color | SORT color", includeCCSMetadata @@ -336,29 +354,57 @@ public void testGroupedAggs() throws Exception { if (includeCCSMetadata) { mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); if (includeCCSMetadata) { assertClusterDetailsMap(result, true); } } } + public void testIndexPattern() throws Exception { + { + String indexPattern = randomFrom( + "test-local-index,*:test-remote-index", + "test-local-index,*:test-remote-*", + "test-local-index,*:test-*", + "test-*,*:test-remote-index" + ); + Map result = run("FROM " + indexPattern + " | STATS c = COUNT(*)", false); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(localDocs.size() + remoteDocs.size())); + MapMatcher mapMatcher = matchesMap(); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + } + { + String indexPattern = randomFrom("*:test-remote-index", "*:test-remote-*", "*:test-*"); + Map result = run("FROM " + indexPattern + " | STATS c = COUNT(*)", false); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(remoteDocs.size())); + + MapMatcher mapMatcher = matchesMap(); + if (ccsMetadataAvailable()) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + } + } + private RestClient remoteClusterClient() throws IOException { var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses()); return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); } - private TestFeatureService remoteFeaturesService() throws IOException { - if (remoteFeaturesService == null) { - try (RestClient remoteClient = remoteClusterClient()) { - var remoteNodeVersions = readVersionsFromNodesInfo(remoteClient); - var semanticNodeVersions = remoteNodeVersions.stream() - .map(ESRestTestCase::parseLegacyVersion) - .flatMap(Optional::stream) - .collect(Collectors.toSet()); - remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteClient), semanticNodeVersions); - } - } - return remoteFeaturesService; + private static boolean ccsMetadataAvailable() { + return Clusters.localClusterVersion().onOrAfter(Version.V_8_16_0); + } + + private static boolean includeCCSMetadata() { + return ccsMetadataAvailable() && randomBoolean(); } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java 
b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 9a184b9a620fd..050259bbb5b5c 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -76,7 +76,6 @@ public void testBasicEsql() throws IOException { indexTimestampData(1); RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats avg(value)"); - requestObjectBuilder().includeCCSMetadata(randomBoolean()); if (Build.current().isSnapshot()) { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java index 8564e4b3afde1..031bfd7139a84 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/RemoteClusterPlan.java @@ -9,12 +9,14 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; record RemoteClusterPlan(PhysicalPlan plan, String[] targetIndices, OriginalIndices originalIndices) { static RemoteClusterPlan from(PlanStreamInput planIn) throws IOException { @@ -24,7 +26,8 @@ static RemoteClusterPlan from(PlanStreamInput planIn) throws IOException { if (planIn.getTransportVersion().onOrAfter(TransportVersions.ESQL_ORIGINAL_INDICES)) { originalIndices = OriginalIndices.readOriginalIndices(planIn); } else { - originalIndices = new OriginalIndices(planIn.readStringArray(), IndicesOptions.strictSingleIndexNoExpandForbidClosed()); + // fallback to the previous behavior + originalIndices = new OriginalIndices(planIn.readStringArray(), SearchRequest.DEFAULT_INDICES_OPTIONS); } return new RemoteClusterPlan(plan, targetIndices, originalIndices); } @@ -38,4 +41,18 @@ public void writeTo(PlanStreamOutput out) throws IOException { out.writeStringArray(originalIndices.indices()); } } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + RemoteClusterPlan that = (RemoteClusterPlan) o; + return Objects.equals(plan, that.plan) + && Objects.deepEquals(targetIndices, that.targetIndices) + && Objects.equals(originalIndices, that.originalIndices); + } + + @Override + public int hashCode() { + return Objects.hash(plan, Arrays.hashCode(targetIndices), originalIndices); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java new file mode 100644 index 0000000000000..07ca112e8c527 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ClusterRequestTests.java @@ -0,0 +1,206 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xpack.esql.ConfigurationTestUtils; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; +import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.index.EsIndex; +import org.elasticsearch.xpack.esql.index.IndexResolution; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; +import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; +import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomConfiguration; +import static org.elasticsearch.xpack.esql.ConfigurationTestUtils.randomTables; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_CFG; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.emptyPolicyResolution; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; +import static org.hamcrest.Matchers.equalTo; + +public class ClusterRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return ClusterComputeRequest::new; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List writeables = new ArrayList<>(); + writeables.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); + writeables.addAll(new EsqlPlugin().getNamedWriteables()); + return new NamedWriteableRegistry(writeables); + } + + @Override + protected ClusterComputeRequest createTestInstance() { + var sessionId = randomAlphaOfLength(10); + String query = randomQuery(); + PhysicalPlan physicalPlan = DataNodeRequestTests.mapAndMaybeOptimize(parse(query)); + OriginalIndices originalIndices = new OriginalIndices( + generateRandomStringArray(10, 10, false, false), + IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) + ); + String[] targetIndices = generateRandomStringArray(10, 10, false, false); + ClusterComputeRequest request = new ClusterComputeRequest( + randomAlphaOfLength(10), + sessionId, + randomConfiguration(query, randomTables()), + new RemoteClusterPlan(physicalPlan, targetIndices, 
originalIndices) + ); + request.setParentTask(randomAlphaOfLength(10), randomNonNegativeLong()); + return request; + } + + @Override + protected ClusterComputeRequest mutateInstance(ClusterComputeRequest in) throws IOException { + return switch (between(0, 4)) { + case 0 -> { + var request = new ClusterComputeRequest( + randomValueOtherThan(in.clusterAlias(), () -> randomAlphaOfLength(10)), + in.sessionId(), + in.configuration(), + in.remoteClusterPlan() + ); + request.setParentTask(in.getParentTask()); + yield request; + } + case 1 -> { + var request = new ClusterComputeRequest( + in.clusterAlias(), + randomValueOtherThan(in.sessionId(), () -> randomAlphaOfLength(10)), + in.configuration(), + in.remoteClusterPlan() + ); + request.setParentTask(in.getParentTask()); + yield request; + } + case 2 -> { + var request = new ClusterComputeRequest( + in.clusterAlias(), + in.sessionId(), + randomValueOtherThan(in.configuration(), ConfigurationTestUtils::randomConfiguration), + in.remoteClusterPlan() + ); + request.setParentTask(in.getParentTask()); + yield request; + } + case 3 -> { + RemoteClusterPlan plan = in.remoteClusterPlan(); + var request = new ClusterComputeRequest( + in.clusterAlias(), + in.sessionId(), + in.configuration(), + new RemoteClusterPlan( + plan.plan(), + randomValueOtherThan(plan.targetIndices(), () -> generateRandomStringArray(10, 10, false, false)), + plan.originalIndices() + ) + ); + request.setParentTask(in.getParentTask()); + yield request; + } + case 4 -> { + RemoteClusterPlan plan = in.remoteClusterPlan(); + var request = new ClusterComputeRequest( + in.clusterAlias(), + in.sessionId(), + in.configuration(), + new RemoteClusterPlan( + plan.plan(), + plan.targetIndices(), + new OriginalIndices( + plan.originalIndices().indices(), + randomValueOtherThan( + plan.originalIndices().indicesOptions(), + () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) + ) + ) + ) + ); + request.setParentTask(in.getParentTask()); + yield request; + } + default -> throw new AssertionError("invalid value"); + }; + } + + public void testFallbackIndicesOptions() throws Exception { + ClusterComputeRequest request = createTestInstance(); + var version = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_8_14_0, + TransportVersions.ESQL_ORIGINAL_INDICES + ); + ClusterComputeRequest cloned = copyInstance(request, version); + assertThat(cloned.clusterAlias(), equalTo(request.clusterAlias())); + assertThat(cloned.sessionId(), equalTo(request.sessionId())); + assertThat(cloned.configuration(), equalTo(request.configuration())); + RemoteClusterPlan plan = cloned.remoteClusterPlan(); + assertThat(plan.plan(), equalTo(request.remoteClusterPlan().plan())); + assertThat(plan.targetIndices(), equalTo(request.remoteClusterPlan().targetIndices())); + OriginalIndices originalIndices = plan.originalIndices(); + assertThat(originalIndices.indices(), equalTo(request.remoteClusterPlan().originalIndices().indices())); + assertThat(originalIndices.indicesOptions(), equalTo(SearchRequest.DEFAULT_INDICES_OPTIONS)); + } + + private static String randomQuery() { + return randomFrom(""" + from test + | where round(emp_no) > 10 + | limit 10 + """, """ + from test + | sort last_name + | limit 10 + | where round(emp_no) > 10 + | eval c = first_name + """); + } + + static LogicalPlan parse(String query) { + Map mapping = loadMapping("mapping-basic.json"); + EsIndex test = new EsIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); + 
IndexResolution getIndexResult = IndexResolution.valid(test); + var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG)); + var analyzer = new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, emptyPolicyResolution()), + TEST_VERIFIER + ); + return logicalOptimizer.optimize(analyzer.analyze(new EsqlParser().createStatement(query))); + } + + @Override + protected List<String> filteredWarnings() { + return withDefaultLimitWarning(super.filteredWarnings()); + } +} From 00a1222f10a6bc605f67aee67d4053c5ba0557e8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 4 Dec 2024 02:32:41 +1100 Subject: [PATCH 377/386] Mute org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilderTests testToQuery #117904 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 3652173327e84..857266a5a47cd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -238,6 +238,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117862 - class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/117893 +- class: org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilderTests + method: testToQuery + issue: https://github.com/elastic/elasticsearch/issues/117904 # Examples: # From c1a9d44ed4ac980130deb730991f10cce127c583 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 3 Dec 2024 08:42:49 -0800 Subject: [PATCH 378/386] Guard against missing file in CI upload (#117889) Somehow files can be lost before the build ends up uploading them, presumably from temporary file deletion after tests complete. This commit guards against this case so that the build will not completely fail, but instead log a warning.
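In essence, the guard is a skip-and-warn on paths that vanish between test completion and archive creation, while non-regular files remain a hard error. A minimal sketch of the pattern (the method name here is illustrative; the real change lives in ElasticsearchBuildCompletePlugin#createBuildArchiveTar, assumes the class-level SLF4J "log" field this patch adds, and elides the tar-entry writing):

    // Sketch: tolerate files that disappear before the CI archive is built.
    private static void addFilesToArchive(List<File> files, TarArchiveOutputStream tOut) throws IOException {
        for (Path path : files.stream().map(File::toPath).toList()) {
            if (Files.exists(path) == false) {
                // temporary test files may be cleaned up before the upload runs; warn and skip instead of failing the build
                log.warn("File disappeared before it could be added to CI archive: " + path);
                continue;
            } else if (Files.isRegularFile(path) == false) {
                // anything that is not a regular file is still a hard error
                throw new IOException("Support only file!: " + path);
            }
            // ... write the regular file as a tar entry, unchanged from before ...
        }
    }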
--- .../internal/ElasticsearchBuildCompletePlugin.java | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index 14baa55794c95..b1207a2f5161d 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -29,6 +29,8 @@ import org.gradle.api.provider.Property; import org.gradle.api.tasks.Input; import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; @@ -47,6 +49,8 @@ public abstract class ElasticsearchBuildCompletePlugin implements Plugin { + private static final Logger log = LoggerFactory.getLogger(ElasticsearchBuildCompletePlugin.class); + @Inject protected abstract FlowScope getFlowScope(); @@ -241,8 +245,11 @@ private static void createBuildArchiveTar(List files, File projectDir, Fil tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR); for (Path path : files.stream().map(File::toPath).toList()) { - if (!Files.isRegularFile(path)) { - throw new IOException("Support only file!"); + if (Files.exists(path) == false) { + log.warn("File disappeared before it could be added to CI archive: " + path); + continue; + } else if (!Files.isRegularFile(path)) { + throw new IOException("Support only file!: " + path); } long entrySize = Files.size(path); From 0a208279ea869fafe7ee9b4c4ac60d4b9816bd25 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 3 Dec 2024 17:53:10 +0100 Subject: [PATCH 379/386] ES|QL fix telemetry tests (usage stats) after promoting CATEGORIZE (#117878) --- muted-tests.yml | 3 --- .../resources/rest-api-spec/test/esql/60_usage.yml | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 857266a5a47cd..7bd06a6605028 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -233,9 +233,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/117815 - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT issue: https://github.com/elastic/elasticsearch/issues/111319 -- class: org.elasticsearch.xpack.test.rest.XPackRestIT - method: test {p0=esql/60_usage/Basic ESQL usage output (telemetry) non-snapshot version} - issue: https://github.com/elastic/elasticsearch/issues/117862 - class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT issue: https://github.com/elastic/elasticsearch/issues/117893 - class: org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilderTests diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index f7dd979540afa..c23b44c00bd14 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -163,4 +163,4 @@ setup: - match: {esql.functions.cos: $functions_cos} - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} - - length: {esql.functions: 118} # check the "sister" test above for a likely update to the 
same esql.functions length check + - length: {esql.functions: 119} # check the "sister" test above for a likely update to the same esql.functions length check From 22f4a799377ea8710076ff10b74fbb48724a0c09 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 3 Dec 2024 20:08:05 +0200 Subject: [PATCH 380/386] Smarter field caps with subscribable listener (#116755) --- docs/changelog/116755.yaml | 5 + .../multi_node/RequestIndexFilteringIT.java | 27 ++ .../single_node/RequestIndexFilteringIT.java | 27 ++ .../rest/RequestIndexFilteringTestCase.java | 284 ++++++++++++++++ .../esql/qa/rest/RestEnrichTestCase.java | 176 +++++++++- .../esql/enrich/EnrichPolicyResolver.java | 2 +- .../xpack/esql/session/EsqlSession.java | 315 ++++++++++++------ .../xpack/esql/session/IndexResolver.java | 13 +- 8 files changed, 741 insertions(+), 108 deletions(-) create mode 100644 docs/changelog/116755.yaml create mode 100644 x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/RequestIndexFilteringIT.java create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RequestIndexFilteringIT.java create mode 100644 x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java diff --git a/docs/changelog/116755.yaml b/docs/changelog/116755.yaml new file mode 100644 index 0000000000000..3aa5ec8580b59 --- /dev/null +++ b/docs/changelog/116755.yaml @@ -0,0 +1,5 @@ +pr: 116755 +summary: Smarter field caps with subscribable listener +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/RequestIndexFilteringIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/RequestIndexFilteringIT.java new file mode 100644 index 0000000000000..c2ba502b92554 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/RequestIndexFilteringIT.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.multi_node; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.xpack.esql.qa.rest.RequestIndexFilteringTestCase; +import org.junit.ClassRule; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class RequestIndexFilteringIT extends RequestIndexFilteringTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = Clusters.testCluster(ignored -> {}); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RequestIndexFilteringIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RequestIndexFilteringIT.java new file mode 100644 index 0000000000000..f13bcd618f0a8 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RequestIndexFilteringIT.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.xpack.esql.qa.rest.RequestIndexFilteringTestCase; +import org.junit.ClassRule; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class RequestIndexFilteringIT extends RequestIndexFilteringTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = Clusters.testCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java new file mode 100644 index 0000000000000..3314430d63eaa --- /dev/null +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java @@ -0,0 +1,284 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.rest; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.AssertWarnings; +import org.junit.After; +import org.junit.Assert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.entityToMap; +import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.requestObjectBuilder; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + +public abstract class RequestIndexFilteringTestCase extends ESRestTestCase { + + @After + public void wipeTestData() throws IOException { + try { + var response = client().performRequest(new Request("DELETE", "/test*")); + assertEquals(200, response.getStatusLine().getStatusCode()); + } catch (ResponseException re) { + assertEquals(404, re.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testTimestampFilterFromQuery() throws IOException { + int docsTest1 = 50; + int docsTest2 = 30; + indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); + indexTimestampData(docsTest2, "test2", "2023-11-26", "id2"); + + // filter includes both indices in the result (all columns, all rows) + RestEsqlTestCase.RequestObjectBuilder builder = timestampFilter("gte", "2023-01-01").query("FROM test*"); + Map result = runEsql(builder); + assertMap( + result, + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + .item(matchesMap().entry("name", "id2").entry("type", "integer")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1 + docsTest2))).entry("took", greaterThanOrEqualTo(0)) + ); + + // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! + builder = timestampFilter("gte", "2024-01-01").query("FROM test*"); + assertMap( + runEsql(builder), + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0)) + ); + + // filter excludes both indices (no rows); the first analysis step fails because there are no columns, a second attempt succeeds + // after eliminating the index filter. All columns are returned. 
+ builder = timestampFilter("gte", "2025-01-01").query("FROM test*"); + assertMap( + runEsql(builder), + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + .item(matchesMap().entry("name", "id2").entry("type", "integer")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + ).entry("values", allOf(instanceOf(List.class), hasSize(0))).entry("took", greaterThanOrEqualTo(0)) + ); + } + + public void testFieldExistsFilter_KeepWildcard() throws IOException { + int docsTest1 = randomIntBetween(0, 10); + int docsTest2 = randomIntBetween(0, 10); + indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); + indexTimestampData(docsTest2, "test2", "2023-11-26", "id2"); + + // filter includes only test1. Columns and rows of test2 are filtered out + RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query("FROM test*"); + Map<String, Object> result = runEsql(builder); + assertMap( + result, + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0)) + ); + + // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)! + builder = existsFilter("id1").query("FROM test* METADATA _index | KEEP _index, id*"); + result = runEsql(builder); + assertMap( + result, + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0)) + ); + @SuppressWarnings("unchecked") + var values = (List<List<Object>>) result.get("values"); + for (List<Object> row : values) { + assertThat(row.get(0), equalTo("test1")); + assertThat(row.get(1), instanceOf(Integer.class)); + } + } + + public void testFieldExistsFilter_With_ExplicitUseOfDiscardedIndexFields() throws IOException { + int docsTest1 = randomIntBetween(1, 5); + int docsTest2 = randomIntBetween(0, 5); + indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); + indexTimestampData(docsTest2, "test2", "2023-11-26", "id2"); + + // test2 is explicitly used in a query with "SORT id2" even if the index filter should discard test2 + RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query( + "FROM test* METADATA _index | SORT id2 | KEEP _index, id*" + ); + Map<String, Object> result = runEsql(builder); + assertMap( + result, + matchesMap().entry( + "columns", + matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword")) + .item(matchesMap().entry("name", "id1").entry("type", "integer")) + .item(matchesMap().entry("name", "id2").entry("type", "integer")) + ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0)) + ); + @SuppressWarnings("unchecked") + var values = (List<List<Object>>) result.get("values"); + for (List<Object> row : values) { + assertThat(row.get(0), equalTo("test1")); + assertThat(row.get(1), instanceOf(Integer.class)); + assertThat(row.get(2), nullValue()); + } + } + + public void testFieldNameTypo() throws IOException { + int docsTest1 = randomIntBetween(0, 5); + int docsTest2 = randomIntBetween(0, 5); + 
indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); + indexTimestampData(docsTest2, "test2", "2023-11-26", "id2"); + + // idx field name is explicitly used, though it doesn't exist in any of the indices. First test - without filter + ResponseException e = expectThrows( + ResponseException.class, + () -> runEsql(requestObjectBuilder().query("FROM test* | WHERE idx == 123")) + ); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("verification_exception")); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 1:20: Unknown column [idx]")); + + e = expectThrows(ResponseException.class, () -> runEsql(requestObjectBuilder().query("FROM test1 | WHERE idx == 123"))); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("verification_exception")); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 1:20: Unknown column [idx]")); + + e = expectThrows( + ResponseException.class, + () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM test* | WHERE idx == 123")) + ); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 1:20: Unknown column [idx]")); + + e = expectThrows( + ResponseException.class, + () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM test2 | WHERE idx == 123")) + ); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 1:20: Unknown column [idx]")); + } + + public void testIndicesDontExist() throws IOException { + int docsTest1 = 0; // we are interested only in the created index, not necessarily that it has data + indexTimestampData(docsTest1, "test1", "2024-11-26", "id1"); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM foo"))); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("verification_exception")); + assertThat(e.getMessage(), containsString("Unknown index [foo]")); + + e = expectThrows(ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM foo*"))); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("verification_exception")); + assertThat(e.getMessage(), containsString("Unknown index [foo*]")); + + e = expectThrows(ResponseException.class, () -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM foo,test1"))); + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("index_not_found_exception")); + assertThat(e.getMessage(), containsString("no such index [foo]")); + } + + private static RestEsqlTestCase.RequestObjectBuilder timestampFilter(String op, String date) throws IOException { + return requestObjectBuilder().filter(b -> { + b.startObject("range"); + { + b.startObject("@timestamp").field(op, date).endObject(); + } + b.endObject(); + }); + } + + private static RestEsqlTestCase.RequestObjectBuilder existsFilter(String field) throws IOException { + return requestObjectBuilder().filter(b -> b.startObject("exists").field("field", 
field).endObject()); + } + + public Map runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { + return RestEsqlTestCase.runEsql(requestObject, new AssertWarnings.NoWarnings(), RestEsqlTestCase.Mode.SYNC); + } + + protected void indexTimestampData(int docs, String indexName, String date, String differentiatorFieldName) throws IOException { + Request createIndex = new Request("PUT", indexName); + createIndex.setJsonEntity(""" + { + "settings": { + "index": { + "number_of_shards": 3 + } + }, + "mappings": { + "properties": { + "@timestamp": { + "type": "date" + }, + "%differentiator_field_name%": { + "type": "integer" + } + } + } + }""".replace("%differentiator_field_name%", differentiatorFieldName)); + Response response = client().performRequest(createIndex); + assertThat( + entityToMap(response.getEntity(), XContentType.JSON), + matchesMap().entry("shards_acknowledged", true).entry("index", indexName).entry("acknowledged", true) + ); + + if (docs > 0) { + StringBuilder b = new StringBuilder(); + for (int i = 0; i < docs; i++) { + b.append(String.format(Locale.ROOT, """ + {"create":{"_index":"%s"}} + {"@timestamp":"%s","value":%d,"%s":%d} + """, indexName, date, i, differentiatorFieldName, i)); + } + Request bulk = new Request("POST", "/_bulk"); + bulk.addParameter("refresh", "true"); + bulk.addParameter("filter_path", "errors"); + bulk.setJsonEntity(b.toString()); + response = client().performRequest(bulk); + Assert.assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); + } + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index def6491fb920f..bf4a4400e13cf 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -12,7 +12,9 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.XContentBuilder; import org.junit.After; import org.junit.Before; @@ -29,7 +31,6 @@ public abstract class RestEnrichTestCase extends ESRestTestCase { private static final String sourceIndexName = "countries"; - private static final String testIndexName = "test"; private static final String policyName = "countries"; public enum Mode { @@ -56,7 +57,7 @@ public void assertRequestBreakerEmpty() throws Exception { @Before public void loadTestData() throws IOException { - Request request = new Request("PUT", "/" + testIndexName); + Request request = new Request("PUT", "/test1"); request.setJsonEntity(""" { "mappings": { @@ -72,7 +73,7 @@ public void loadTestData() throws IOException { }"""); assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); - request = new Request("POST", "/" + testIndexName + "/_bulk"); + request = new Request("POST", "/test1/_bulk"); request.addParameter("refresh", "true"); request.setJsonEntity(""" { "index": {"_id": 1} } @@ -84,6 +85,34 @@ public void loadTestData() throws IOException { """); assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + request = new Request("PUT", "/test2"); + 
request.setJsonEntity(""" + { + "mappings": { + "properties": { + "geo.dest": { + "type": "keyword" + }, + "country_number": { + "type": "long" + } + } + } + }"""); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("POST", "/test2/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(""" + { "index": {"_id": 1} } + { "geo.dest": "IN", "country_number": 2 } + { "index": {"_id": 2} } + { "geo.dest": "IN", "country_number": 2 } + { "index": {"_id": 3} } + { "geo.dest": "US", "country_number": 3 } + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + request = new Request("PUT", "/" + sourceIndexName); request.setJsonEntity(""" { @@ -131,7 +160,7 @@ public void loadTestData() throws IOException { @After public void wipeTestData() throws IOException { try { - var response = client().performRequest(new Request("DELETE", "/" + testIndexName)); + var response = client().performRequest(new Request("DELETE", "/test1,test2")); assertEquals(200, response.getStatusLine().getStatusCode()); response = client().performRequest(new Request("DELETE", "/" + sourceIndexName)); assertEquals(200, response.getStatusLine().getStatusCode()); @@ -143,7 +172,7 @@ public void wipeTestData() throws IOException { } public void testNonExistentEnrichPolicy() throws IOException { - ResponseException re = expectThrows(ResponseException.class, () -> runEsql("from test | enrich countris", Mode.SYNC)); + ResponseException re = expectThrows(ResponseException.class, () -> runEsql("from test1 | enrich countris", null, Mode.SYNC)); assertThat( EntityUtils.toString(re.getResponse().getEntity()), containsString("cannot find enrich policy [countris], did you mean [countries]?") @@ -151,7 +180,10 @@ public void testNonExistentEnrichPolicy() throws IOException { } public void testNonExistentEnrichPolicy_KeepField() throws IOException { - ResponseException re = expectThrows(ResponseException.class, () -> runEsql("from test | enrich countris | keep number", Mode.SYNC)); + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql("from test1 | enrich countris | keep number", null, Mode.SYNC) + ); assertThat( EntityUtils.toString(re.getResponse().getEntity()), containsString("cannot find enrich policy [countris], did you mean [countries]?") @@ -159,25 +191,147 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { } public void testMatchField_ImplicitFieldsList() throws IOException { - Map result = runEsql("from test | enrich countries | keep number | sort number"); + Map result = runEsql("from test1 | enrich countries | keep number | sort number"); var columns = List.of(Map.of("name", "number", "type", "long")); var values = List.of(List.of(1000), List.of(1000), List.of(5000)); assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); } public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { - Map result = runEsql("from test | enrich countries | stats s = sum(number) by country_name"); + Map result = runEsql("from test1 | enrich countries | stats s = sum(number) by country_name"); var columns = List.of(Map.of("name", "s", "type", "long"), Map.of("name", "country_name", "type", "keyword")); var values = List.of(List.of(2000, "United States of America"), List.of(5000, "China")); assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", 
greaterThanOrEqualTo(0))); } + public void testSimpleIndexFilteringWithEnrich() throws IOException { + // no filter + Map result = runEsql(""" + from test* metadata _index + | enrich countries + | keep *number, geo.dest, _index + | sort geo.dest, _index + """); + var columns = List.of( + Map.of("name", "country_number", "type", "long"), + Map.of("name", "number", "type", "long"), + Map.of("name", "geo.dest", "type", "keyword"), + Map.of("name", "_index", "type", "keyword") + ); + var values = List.of( + Arrays.asList(null, 5000, "CN", "test1"), + Arrays.asList(2, null, "IN", "test2"), + Arrays.asList(2, null, "IN", "test2"), + Arrays.asList(null, 1000, "US", "test1"), + Arrays.asList(null, 1000, "US", "test1"), + Arrays.asList(3, null, "US", "test2") + ); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + + // filter something that won't affect the columns + result = runEsql(""" + from test* metadata _index + | enrich countries + | keep *number, geo.dest, _index + | sort geo.dest, _index + """, b -> b.startObject("exists").field("field", "foobar").endObject()); + assertMap(result, matchesMap().entry("columns", columns).entry("values", List.of()).entry("took", greaterThanOrEqualTo(0))); + } + + public void testIndexFilteringWithEnrich_RemoveOneIndex() throws IOException { + // filter out test2 but specifically use one of its fields in the query (country_number) + Map result = runEsql(""" + from test* metadata _index + | enrich countries + | keep country_number, number, geo.dest, _index + | sort geo.dest, _index + """, b -> b.startObject("exists").field("field", "number").endObject()); + + var columns = List.of( + Map.of("name", "country_number", "type", "long"), + Map.of("name", "number", "type", "long"), + Map.of("name", "geo.dest", "type", "keyword"), + Map.of("name", "_index", "type", "keyword") + ); + var values = List.of( + Arrays.asList(null, 5000, "CN", "test1"), + Arrays.asList(null, 1000, "US", "test1"), + Arrays.asList(null, 1000, "US", "test1") + ); + + assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + + // filter out test2 and use a wildcarded field name in the "keep" command + result = runEsql(""" + from test* metadata _index + | enrich countries + | keep *number, geo.dest, _index + | sort geo.dest, _index + """, b -> b.startObject("exists").field("field", "number").endObject()); + + columns = List.of( + Map.of("name", "number", "type", "long"), + Map.of("name", "geo.dest", "type", "keyword"), + Map.of("name", "_index", "type", "keyword") + ); + values = List.of(Arrays.asList(5000, "CN", "test1"), Arrays.asList(1000, "US", "test1"), Arrays.asList(1000, "US", "test1")); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + } + + public void testIndexFilteringWithEnrich_ExpectException() throws IOException { + // no filter, just a simple query with "enrich" that should throw a valid VerificationException + ResponseException e = expectThrows(ResponseException.class, () -> runEsql(""" + from test* metadata _index + | enrich countries + | where foobar == 123 + """)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 3:13: Unknown column [foobar]")); + + // same query, but with a filter this time + e = 
expectThrows(ResponseException.class, () -> runEsql(""" + from test* metadata _index + | enrich countries + | where foobar == 123 + """, b -> b.startObject("exists").field("field", "number").endObject())); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("Found 1 problem")); + assertThat(e.getMessage(), containsString("line 3:13: Unknown column [foobar]")); + } + + public void testIndexFilteringWithEnrich_FilterUnusedIndexFields() throws IOException { + // filter out "test1". The field that is specific to "test1" ("number") is not actually used in the query + Map result = runEsql(""" + from test* metadata _index + | enrich countries + | keep country_number, geo.dest, _index + | sort geo.dest, _index + """, b -> b.startObject("exists").field("field", "country_number").endObject()); + + var columns = List.of( + Map.of("name", "country_number", "type", "long"), + Map.of("name", "geo.dest", "type", "keyword"), + Map.of("name", "_index", "type", "keyword") + ); + var values = List.of(Arrays.asList(2, "IN", "test2"), Arrays.asList(2, "IN", "test2"), Arrays.asList(3, "US", "test2")); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + } + private Map runEsql(String query) throws IOException { - return runEsql(query, mode); + return runEsql(query, null, mode); } - private Map runEsql(String query, Mode mode) throws IOException { - var requestObject = new RestEsqlTestCase.RequestObjectBuilder().query(query); + private Map runEsql(String query, CheckedConsumer filter) throws IOException { + return runEsql(query, filter, mode); + } + + private Map runEsql(String query, CheckedConsumer filter, Mode mode) throws IOException { + var requestObject = new RestEsqlTestCase.RequestObjectBuilder(); + if (filter != null) { + requestObject.filter(filter); + } + requestObject.query(query); if (mode == Mode.ASYNC) { return RestEsqlTestCase.runEsqlAsync(requestObject); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index c8a7a6bcc4e98..c8e993b7dbf0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -411,7 +411,7 @@ public void messageReceived(LookupRequest request, TransportChannel channel, Tas } try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { String indexName = EnrichPolicy.getBaseName(policyName); - indexResolver.resolveAsMergedMapping(indexName, IndexResolver.ALL_FIELDS, refs.acquire(indexResult -> { + indexResolver.resolveAsMergedMapping(indexName, IndexResolver.ALL_FIELDS, null, refs.acquire(indexResult -> { if (indexResult.isValid() && indexResult.get().concreteIndices().size() == 1) { EsIndex esIndex = indexResult.get(); var concreteIndices = Map.of(request.clusterAlias, Iterables.get(esIndex.concreteIndices(), 0)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 3d1ed8f70eae0..71fba5683644d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.Strings; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.collect.Iterators; @@ -25,6 +26,7 @@ import org.elasticsearch.indices.IndicesExpressionGrouper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -151,6 +153,7 @@ public void execute(EsqlQueryRequest request, EsqlExecutionInfo executionInfo, P analyzedPlan( parse(request.query(), request.params()), executionInfo, + request.filter(), new EsqlSessionCCSUtils.CssPartialErrorsActionListener(executionInfo, listener) { @Override public void onResponse(LogicalPlan analyzedPlan) { @@ -268,31 +271,28 @@ private LogicalPlan parse(String query, QueryParams params) { return parsed; } - public void analyzedPlan(LogicalPlan parsed, EsqlExecutionInfo executionInfo, ActionListener listener) { + public void analyzedPlan( + LogicalPlan parsed, + EsqlExecutionInfo executionInfo, + QueryBuilder requestFilter, + ActionListener logicalPlanListener + ) { if (parsed.analyzed()) { - listener.onResponse(parsed); + logicalPlanListener.onResponse(parsed); return; } - preAnalyze(parsed, executionInfo, (indices, lookupIndices, policies) -> { + TriFunction analyzeAction = (indices, lookupIndices, policies) -> { planningMetrics.gatherPreAnalysisMetrics(parsed); Analyzer analyzer = new Analyzer( new AnalyzerContext(configuration, functionRegistry, indices, lookupIndices, policies), verifier ); - var plan = analyzer.analyze(parsed); + LogicalPlan plan = analyzer.analyze(parsed); plan.setAnalyzed(); - LOGGER.debug("Analyzed plan:\n{}", plan); return plan; - }, listener); - } + }; - private void preAnalyze( - LogicalPlan parsed, - EsqlExecutionInfo executionInfo, - TriFunction action, - ActionListener listener - ) { PreAnalyzer.PreAnalysis preAnalysis = preAnalyzer.preAnalyze(parsed); var unresolvedPolicies = preAnalysis.enriches.stream() .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) @@ -302,81 +302,113 @@ private void preAnalyze( final Set targetClusters = enrichPolicyResolver.groupIndicesPerCluster( indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) ).keySet(); - enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> { - // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API - var enrichMatchFields = enrichResolution.resolvedEnrichPolicies() - .stream() - .map(ResolvedEnrichPolicy::matchField) - .collect(Collectors.toSet()); - // get the field names from the parsed plan combined with the ENRICH match fields from the ENRICH policy - var fieldNames = fieldNames(parsed, enrichMatchFields); - // First resolve the lookup indices, then the main indices - preAnalyzeLookupIndices( - preAnalysis.lookupIndices, + + SubscribableListener.newForked(l -> 
enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, l)) + .andThen((l, enrichResolution) -> { + // we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API + var enrichMatchFields = enrichResolution.resolvedEnrichPolicies() + .stream() + .map(ResolvedEnrichPolicy::matchField) + .collect(Collectors.toSet()); + // get the field names from the parsed plan combined with the ENRICH match fields from the ENRICH policy + var fieldNames = fieldNames(parsed, enrichMatchFields); + ListenerResult listenerResult = new ListenerResult(null, null, enrichResolution, fieldNames); + + // first resolve the lookup indices, then the main indices + preAnalyzeLookupIndices(preAnalysis.lookupIndices, listenerResult, l); + }) + .andThen((l, listenerResult) -> { + // resolve the main indices + preAnalyzeIndices(preAnalysis.indices, executionInfo, listenerResult, requestFilter, l); + }) + .andThen((l, listenerResult) -> { + // TODO in follow-PR (for skip_unavailable handling of missing concrete indexes) add some tests for + // invalid index resolution to updateExecutionInfo + if (listenerResult.indices.isValid()) { + // CCS indices and skip_unavailable cluster values can stop the analysis right here + if (analyzeCCSIndices(executionInfo, targetClusters, unresolvedPolicies, listenerResult, logicalPlanListener, l)) + return; + } + // whatever tuple we have here (from CCS-special handling or from the original pre-analysis), pass it on to the next step + l.onResponse(listenerResult); + }) + .andThen((l, listenerResult) -> { + // first attempt (maybe the only one) at analyzing the plan + analyzeAndMaybeRetry(analyzeAction, requestFilter, listenerResult, logicalPlanListener, l); + }) + .andThen((l, listenerResult) -> { + assert requestFilter != null : "The second pre-analysis shouldn't take place when there is no index filter in the request"; + + // "reset" execution information for all ccs or non-ccs (local) clusters, since we are performing the indices + // resolving one more time (the first attempt failed and the query had a filter) + for (String clusterAlias : executionInfo.clusterAliases()) { + executionInfo.swapCluster(clusterAlias, (k, v) -> null); + } + + // here the requestFilter is set to null, performing the pre-analysis after the first step failed + preAnalyzeIndices(preAnalysis.indices, executionInfo, listenerResult, null, l); + }) + .andThen((l, listenerResult) -> { + assert requestFilter != null : "The second analysis shouldn't take place when there is no index filter in the request"; + LOGGER.debug("Analyzing the plan (second attempt, without filter)"); + LogicalPlan plan; + try { + plan = analyzeAction.apply(listenerResult.indices, listenerResult.lookupIndices, listenerResult.enrichResolution); + } catch (Exception e) { + l.onFailure(e); + return; + } + LOGGER.debug("Analyzed plan (second attempt, without filter):\n{}", plan); + l.onResponse(plan); + }) + .addListener(logicalPlanListener); + } + + private void preAnalyzeLookupIndices(List indices, ListenerResult listenerResult, ActionListener listener) { + if (indices.size() > 1) { + // Note: JOINs on more than one index are not yet supported + listener.onFailure(new MappingException("More than one LOOKUP JOIN is not supported")); + } else if (indices.size() == 1) { + TableInfo tableInfo = indices.get(0); + TableIdentifier table = tableInfo.id(); + // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types + indexResolver.resolveAsMergedMapping( + 
table.index(), Set.of("*"), // Current LOOKUP JOIN syntax does not allow for field selection - l.delegateFailureAndWrap( - (lx, lookupIndexResolution) -> preAnalyzeIndices( - indices, - executionInfo, - enrichResolution.getUnavailableClusters(), - fieldNames, - lx.delegateFailureAndWrap((ll, indexResolution) -> { - // TODO in follow-PR (for skip_unavailble handling of missing concrete indexes) add some tests for invalid - // index resolution to updateExecutionInfo - if (indexResolution.isValid()) { - EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); - EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters( - executionInfo, - indexResolution.unavailableClusters() - ); - if (executionInfo.isCrossClusterSearch() - && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) == 0) { - // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel - // Exception to let the LogicalPlanActionListener decide how to proceed - ll.onFailure(new NoClustersToSearchException()); - return; - } - - Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( - indexResolution.get().concreteIndices().toArray(String[]::new) - ).keySet(); - // If new clusters appear when resolving the main indices, we need to resolve the enrich policies again - // or exclude main concrete indices. Since this is rare, it's simpler to resolve the enrich policies - // again. - // TODO: add a test for this - if (targetClusters.containsAll(newClusters) == false - // do not bother with a re-resolution if only remotes were requested and all were offline - && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) > 0) { - enrichPolicyResolver.resolvePolicies( - newClusters, - unresolvedPolicies, - ll.map( - newEnrichResolution -> action.apply(indexResolution, lookupIndexResolution, newEnrichResolution) - ) - ); - return; - } - } - ll.onResponse(action.apply(indexResolution, lookupIndexResolution, enrichResolution)); - }) - ) - ) + null, + listener.map(indexResolution -> listenerResult.withLookupIndexResolution(indexResolution)) ); - })); + } else { + try { + // No lookup indices specified + listener.onResponse( + new ListenerResult( + listenerResult.indices, + IndexResolution.invalid("[none specified]"), + listenerResult.enrichResolution, + listenerResult.fieldNames + ) + ); + } catch (Exception ex) { + listener.onFailure(ex); + } + } } private void preAnalyzeIndices( List indices, EsqlExecutionInfo executionInfo, - Map unavailableClusters, // known to be unavailable from the enrich policy API call - Set fieldNames, - ActionListener listener + ListenerResult listenerResult, + QueryBuilder requestFilter, + ActionListener listener ) { // TODO we plan to support joins in the future when possible, but for now we'll just fail early if we see one if (indices.size() > 1) { // Note: JOINs are not supported but we detect them when listener.onFailure(new MappingException("Queries with multiple indices are not supported")); } else if (indices.size() == 1) { + // known to be unavailable from the enrich policy API call + Map unavailableClusters = listenerResult.enrichResolution.getUnavailableClusters(); TableInfo tableInfo = indices.get(0); TableIdentifier table = tableInfo.id(); @@ -409,38 +441,116 @@ private void preAnalyzeIndices( String indexExpressionToResolve = EsqlSessionCCSUtils.createIndexExpressionFromAvailableClusters(executionInfo); if (indexExpressionToResolve.isEmpty()) { // if this was a pure 
remote CCS request (no local indices) and all remotes are offline, return an empty IndexResolution - listener.onResponse(IndexResolution.valid(new EsIndex(table.index(), Map.of(), Map.of()))); + listener.onResponse( + new ListenerResult( + IndexResolution.valid(new EsIndex(table.index(), Map.of(), Map.of())), + listenerResult.lookupIndices, + listenerResult.enrichResolution, + listenerResult.fieldNames + ) + ); } else { // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types - indexResolver.resolveAsMergedMapping(indexExpressionToResolve, fieldNames, listener); + indexResolver.resolveAsMergedMapping( + indexExpressionToResolve, + listenerResult.fieldNames, + requestFilter, + listener.map(indexResolution -> listenerResult.withIndexResolution(indexResolution)) + ); } } else { try { // occurs when dealing with local relations (row a = 1) - listener.onResponse(IndexResolution.invalid("[none specified]")); + listener.onResponse( + new ListenerResult( + IndexResolution.invalid("[none specified]"), + listenerResult.lookupIndices, + listenerResult.enrichResolution, + listenerResult.fieldNames + ) + ); } catch (Exception ex) { listener.onFailure(ex); } } } - private void preAnalyzeLookupIndices(List indices, Set fieldNames, ActionListener listener) { - if (indices.size() > 1) { - // Note: JOINs on more than one index are not yet supported - listener.onFailure(new MappingException("More than one LOOKUP JOIN is not supported")); - } else if (indices.size() == 1) { - TableInfo tableInfo = indices.get(0); - TableIdentifier table = tableInfo.id(); - // call the EsqlResolveFieldsAction (field-caps) to resolve indices and get field types - indexResolver.resolveAsMergedMapping(table.index(), fieldNames, listener); - } else { - try { - // No lookup indices specified - listener.onResponse(IndexResolution.invalid("[none specified]")); - } catch (Exception ex) { - listener.onFailure(ex); + private boolean analyzeCCSIndices( + EsqlExecutionInfo executionInfo, + Set targetClusters, + Set unresolvedPolicies, + ListenerResult listenerResult, + ActionListener logicalPlanListener, + ActionListener l + ) { + IndexResolution indexResolution = listenerResult.indices; + EsqlSessionCCSUtils.updateExecutionInfoWithClustersWithNoMatchingIndices(executionInfo, indexResolution); + EsqlSessionCCSUtils.updateExecutionInfoWithUnavailableClusters(executionInfo, indexResolution.unavailableClusters()); + if (executionInfo.isCrossClusterSearch() && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) == 0) { + // for a CCS, if all clusters have been marked as SKIPPED, nothing to search so send a sentinel Exception + // to let the LogicalPlanActionListener decide how to proceed + logicalPlanListener.onFailure(new NoClustersToSearchException()); + return true; + } + + Set newClusters = enrichPolicyResolver.groupIndicesPerCluster( + indexResolution.get().concreteIndices().toArray(String[]::new) + ).keySet(); + // If new clusters appear when resolving the main indices, we need to resolve the enrich policies again + // or exclude main concrete indices. Since this is rare, it's simpler to resolve the enrich policies again. 
+ // TODO: add a test for this + if (targetClusters.containsAll(newClusters) == false + // do not bother with a re-resolution if only remotes were requested and all were offline + && executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.RUNNING) > 0) { + enrichPolicyResolver.resolvePolicies( + newClusters, + unresolvedPolicies, + l.map(enrichResolution -> listenerResult.withEnrichResolution(enrichResolution)) + ); + return true; + } + return false; + } + + private static void analyzeAndMaybeRetry( + TriFunction analyzeAction, + QueryBuilder requestFilter, + ListenerResult listenerResult, + ActionListener logicalPlanListener, + ActionListener l + ) { + LogicalPlan plan = null; + var filterPresentMessage = requestFilter == null ? "without" : "with"; + var attemptMessage = requestFilter == null ? "the only" : "first"; + LOGGER.debug("Analyzing the plan ({} attempt, {} filter)", attemptMessage, filterPresentMessage); + + try { + plan = analyzeAction.apply(listenerResult.indices, listenerResult.lookupIndices, listenerResult.enrichResolution); + } catch (Exception e) { + if (e instanceof VerificationException ve) { + LOGGER.debug( + "Analyzing the plan ({} attempt, {} filter) failed with {}", + attemptMessage, + filterPresentMessage, + ve.getDetailedMessage() + ); + if (requestFilter == null) { + // if the initial request didn't have a filter, then just pass the exception back to the user + logicalPlanListener.onFailure(ve); + } else { + // interested only in a VerificationException, but this time we are taking out the index filter + // to try and make the index resolution work without any index filtering. In the next step... to be continued + l.onResponse(listenerResult); + } + } else { + // if the query failed with any other type of exception, then just pass the exception back to the user + logicalPlanListener.onFailure(e); } + return; } + LOGGER.debug("Analyzed plan ({} attempt, {} filter):\n{}", attemptMessage, filterPresentMessage, plan); + // the analysis succeeded from the first attempt, irrespective if it had a filter or not, just continue with the planning + logicalPlanListener.onResponse(plan); } static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchFields) { @@ -591,4 +701,23 @@ public PhysicalPlan optimizedPhysicalPlan(LogicalPlan optimizedPlan) { LOGGER.debug("Optimized physical plan:\n{}", plan); return plan; } + + private record ListenerResult( + IndexResolution indices, + IndexResolution lookupIndices, + EnrichResolution enrichResolution, + Set fieldNames + ) { + ListenerResult withEnrichResolution(EnrichResolution newEnrichResolution) { + return new ListenerResult(indices(), lookupIndices(), newEnrichResolution, fieldNames()); + } + + ListenerResult withIndexResolution(IndexResolution newIndexResolution) { + return new ListenerResult(newIndexResolution, lookupIndices(), enrichResolution(), fieldNames()); + } + + ListenerResult withLookupIndexResolution(IndexResolution newIndexResolution) { + return new ListenerResult(indices(), newIndexResolution, enrichResolution(), fieldNames()); + } + }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index f61be4b59830e..d000b2765e2b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -18,6 +18,7 @@ import 
org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.mapper.TimeSeriesParams; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -76,10 +77,15 @@ public IndexResolver(Client client) { /** * Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping. */ - public void resolveAsMergedMapping(String indexWildcard, Set fieldNames, ActionListener listener) { + public void resolveAsMergedMapping( + String indexWildcard, + Set fieldNames, + QueryBuilder requestFilter, + ActionListener listener + ) { client.execute( EsqlResolveFieldsAction.TYPE, - createFieldCapsRequest(indexWildcard, fieldNames), + createFieldCapsRequest(indexWildcard, fieldNames, requestFilter), listener.delegateFailureAndWrap((l, response) -> l.onResponse(mergedMappings(indexWildcard, response))) ); } @@ -252,10 +258,11 @@ private EsField conflictingMetricTypes(String name, String fullName, FieldCapabi return new InvalidMappedField(name, "mapped as different metric types in indices: " + indices); } - private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set fieldNames) { + private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set fieldNames, QueryBuilder requestFilter) { FieldCapabilitiesRequest req = new FieldCapabilitiesRequest().indices(Strings.commaDelimitedListToStringArray(index)); req.fields(fieldNames.toArray(String[]::new)); req.includeUnmapped(true); + req.indexFilter(requestFilter); // lenient because we throw our own errors looking at the response e.g. if something was not resolved // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable req.indicesOptions(FIELD_CAPS_INDICES_OPTIONS); From f2addbc69a2aa7fb512c1d5ca9a839f5fc7f5134 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Tue, 3 Dec 2024 20:10:30 +0200 Subject: [PATCH 381/386] Parse the contents of dynamic objects for [subobjects:false] (#117762) * Parse the contents of dynamic objects for [subobjects:false] * Update docs/changelog/117762.yaml * add tests * tests * test dynamic field * test dynamic field * fix tests --- docs/changelog/117762.yaml | 6 + .../test/search/330_fetch_fields.yml | 118 ++++++++++++++++++ .../index/mapper/DocumentParser.java | 7 +- .../index/mapper/MapperFeatures.java | 1 + .../index/mapper/DocumentParserTests.java | 63 ++++++++++ 5 files changed, 194 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/117762.yaml diff --git a/docs/changelog/117762.yaml b/docs/changelog/117762.yaml new file mode 100644 index 0000000000000..123432e0f0507 --- /dev/null +++ b/docs/changelog/117762.yaml @@ -0,0 +1,6 @@ +pr: 117762 +summary: "Parse the contents of dynamic objects for [subobjects:false]" +area: Mapping +type: bug +issues: + - 117544 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index 8a8dffda69e20..44d966b76f34e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -1177,3 +1177,121 @@ fetch geo_point: - is_false: 
hits.hits.0.fields.message - match: { hits.hits.0._source.message.foo: 10 } - match: { hits.hits.0._source.message.foo\.bar: 20 } + +--- +root with subobjects false and dynamic false: + - requires: + cluster_features: mapper.fix_parsing_subobjects_false_dynamic_false + reason: bug fix + + - do: + indices.create: + index: test + body: + mappings: + subobjects: false + dynamic: false + properties: + id: + type: integer + my.keyword.field: + type: keyword + + - do: + bulk: + index: test + refresh: true + body: + - '{ "index": { } }' + - '{ "id": 1, "my": { "keyword.field": "abc" } }' + - match: { errors: false } + + # indexing a dynamically-mapped field still fails (silently) + - do: + bulk: + index: test + refresh: true + body: + - '{ "index": { } }' + - '{ "id": 2, "my": { "random.field": "abc" } }' + - match: { errors: false } + + - do: + search: + index: test + body: + sort: id + fields: [ "*" ] + + - match: { hits.hits.0.fields: { my.keyword.field: [ abc ], id: [ 1 ] } } + - match: { hits.hits.1.fields: { id: [ 2 ] } } + + - do: + search: + index: test + body: + query: + match: + my.keyword.field: abc + + - match: { hits.total.value: 1 } + +--- +object with subobjects false and dynamic false: + - requires: + cluster_features: mapper.fix_parsing_subobjects_false_dynamic_false + reason: bug fix + + - do: + indices.create: + index: test + body: + mappings: + properties: + my: + subobjects: false + dynamic: false + properties: + id: + type: integer + nested.keyword.field: + type: keyword + + - do: + bulk: + index: test + refresh: true + body: + - '{ "index": { } }' + - '{ "id": 1, "my": { "nested": { "keyword.field": "abc" } } }' + - match: { errors: false } + + # indexing a dynamically-mapped field still fails (silently) + - do: + bulk: + index: test + refresh: true + body: + - '{ "index": { } }' + - '{ "id": 2, "my": { "nested": { "random.field": "abc" } } }' + - match: { errors: false } + + - do: + search: + index: test + body: + sort: id + fields: [ "*" ] + + - match: { hits.hits.0.fields: { my.nested.keyword.field: [ abc ], id: [ 1 ] } } + - match: { hits.hits.1.fields: { id: [ 2 ] } } + + - do: + search: + index: test + body: + query: + match: + my.nested.keyword.field: abc + + - match: { hits.total.value: 1 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 82004356ceb57..e00e7b2320000 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -53,6 +54,9 @@ public final class DocumentParser { public static final IndexVersion DYNAMICALLY_MAP_DENSE_VECTORS_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; + static final NodeFeature FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE = new NodeFeature( + "mapper.fix_parsing_subobjects_false_dynamic_false" + ); private final XContentParserConfiguration parserConfiguration; private final MappingParserContext mappingParserContext; @@ -531,7 +535,8 @@ private static void doParseObject(DocumentParserContext context, String currentF private static void 
parseObjectDynamic(DocumentParserContext context, String currentFieldName) throws IOException { ensureNotStrict(context, currentFieldName); - if (context.dynamic() == ObjectMapper.Dynamic.FALSE) { + // For [subobjects:false], intermediate objects get flattened so we can't skip parsing children. + if (context.dynamic() == ObjectMapper.Dynamic.FALSE && context.parent().subobjects() != ObjectMapper.Subobjects.DISABLED) { failIfMatchesRoutingPath(context, currentFieldName); if (context.canAddIgnoredField()) { context.addIgnoredField( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index bf6c729f95653..ffb38d229078e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -73,6 +73,7 @@ public Set getTestFeatures() { IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS, MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT, + DocumentParser.FIX_PARSING_SUBOBJECTS_FALSE_DYNAMIC_FALSE, CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 09d57d0e34c3c..d128b25038a59 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -2053,6 +2053,38 @@ public void testSubobjectsFalseWithInnerDottedObject() throws Exception { assertNotNull(doc.rootDoc().getField("metrics.service.test.with.dots.max")); } + public void testSubobjectsFalseWithInnerDottedObjectDynamicFalse() throws Exception { + DocumentMapper mapper = createDocumentMapper(mapping(b -> { + b.startObject("metrics").field("type", "object").field("subobjects", false).field("dynamic", randomFrom("false", "runtime")); + b.startObject("properties").startObject("service.test.with.dots").field("type", "keyword").endObject().endObject(); + b.endObject(); + })); + + ParsedDocument doc = mapper.parse(source(""" + { "metrics": { "service": { "test.with.dots": "foo" } } }""")); + assertNotNull(doc.rootDoc().getField("metrics.service.test.with.dots")); + + doc = mapper.parse(source(""" + { "metrics": { "service.test": { "with.dots": "foo" } } }""")); + assertNotNull(doc.rootDoc().getField("metrics.service.test.with.dots")); + + doc = mapper.parse(source(""" + { "metrics": { "service": { "test": { "with.dots": "foo" } } } }""")); + assertNotNull(doc.rootDoc().getField("metrics.service.test.with.dots")); + + doc = mapper.parse(source(""" + { "metrics": { "service": { "test.other.dots": "foo" } } }""")); + assertNull(doc.rootDoc().getField("metrics.service.test.other.dots")); + + doc = mapper.parse(source(""" + { "metrics": { "service.test": { "other.dots": "foo" } } }""")); + assertNull(doc.rootDoc().getField("metrics.service.test.other.dots")); + + doc = mapper.parse(source(""" + { "metrics": { "service": { "test": { "other.dots": "foo" } } } }""")); + assertNull(doc.rootDoc().getField("metrics.service.test.other.dots")); + } + public void testSubobjectsFalseRoot() throws Exception { DocumentMapper mapper = createDocumentMapper(mappingNoSubobjects(xContentBuilder -> {})); ParsedDocument doc = mapper.parse(source(""" @@ -2074,6 
+2106,37 @@ public void testSubobjectsFalseRoot() throws Exception { assertNotNull(doc.rootDoc().getField("metrics.service.test.with.dots")); } + public void testSubobjectsFalseRootWithInnerDottedObjectDynamicFalse() throws Exception { + DocumentMapper mapper = createDocumentMapper(topMapping(b -> { + b.field("subobjects", false).field("dynamic", randomFrom("false", "runtime")); + b.startObject("properties").startObject("service.test.with.dots").field("type", "keyword").endObject().endObject(); + })); + + ParsedDocument doc = mapper.parse(source(""" + { "service": { "test.with.dots": "foo" } }""")); + assertNotNull(doc.rootDoc().getField("service.test.with.dots")); + + doc = mapper.parse(source(""" + { "service.test": { "with.dots": "foo" } }""")); + assertNotNull(doc.rootDoc().getField("service.test.with.dots")); + + doc = mapper.parse(source(""" + { "service": { "test": { "with.dots": "foo" } } }""")); + assertNotNull(doc.rootDoc().getField("service.test.with.dots")); + + doc = mapper.parse(source(""" + { "service": { "test.other.dots": "foo" } }""")); + assertNull(doc.rootDoc().getField("service.test.other.dots")); + + doc = mapper.parse(source(""" + { "service.test": { "other.dots": "foo" } }""")); + assertNull(doc.rootDoc().getField("service.test.other.dots")); + + doc = mapper.parse(source(""" + { "service": { "test": { "other.dots": "foo" } } }""")); + assertNull(doc.rootDoc().getField("service.test.other.dots")); + } + public void testSubobjectsFalseStructuredPath() throws Exception { DocumentMapper mapper = createDocumentMapper( mapping(b -> b.startObject("metrics.service").field("type", "object").field("subobjects", false).endObject()) From f5ff9c6bbb84805647c03a280343a531610f2138 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 3 Dec 2024 20:20:56 +0100 Subject: [PATCH 382/386] [TEST] Adjust exception type in SearchServiceTests#testWaitOnRefreshTimeout (#117884) This test has been failing due to #114526, which changed the exception type to SearchTimeoutException. 
Closes #115935 --- muted-tests.yml | 3 --- .../java/org/elasticsearch/search/SearchServiceTests.java | 4 ++-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 7bd06a6605028..7e5e7f15700f3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -106,9 +106,6 @@ tests: - class: org.elasticsearch.search.StressSearchServiceReaperIT method: testStressReaper issue: https://github.com/elastic/elasticsearch/issues/115816 -- class: org.elasticsearch.search.SearchServiceTests - method: testWaitOnRefreshTimeout - issue: https://github.com/elastic/elasticsearch/issues/115935 - class: org.elasticsearch.search.SearchServiceTests method: testParseSourceValidation issue: https://github.com/elastic/elasticsearch/issues/115936 diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 5dc07a41b3f8c..d1ccfcbe78732 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -20,7 +20,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; @@ -111,6 +110,7 @@ import org.elasticsearch.search.query.NonCountingTermQuery; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.RankShardResult; @@ -2616,7 +2616,7 @@ public void testWaitOnRefreshTimeout() { ); service.executeQueryPhase(request, task, future); - ElasticsearchTimeoutException ex = expectThrows(ElasticsearchTimeoutException.class, future::actionGet); + SearchTimeoutException ex = expectThrows(SearchTimeoutException.class, future::actionGet); assertThat(ex.getMessage(), containsString("Wait for seq_no [0] refreshed timed out [")); } From ebf470acdf645b92dcc82e492699da5df3f6ab39 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 3 Dec 2024 13:04:02 -0800 Subject: [PATCH 383/386] Provide a mechanism to modify config files in a running test cluster (#117859) --- .../local/AbstractLocalClusterFactory.java | 103 +++++++++++++----- .../util/resource/MutableResource.java | 53 +++++++++ .../xpack/eql/EqlSecurityTestCluster.java | 2 +- 3 files changed, 128 insertions(+), 30 deletions(-) create mode 100644 test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/MutableResource.java diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index 2dac2ee232aa5..6070ec140d254 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -26,6 +26,8 @@ import org.elasticsearch.test.cluster.util.ProcessUtils; import org.elasticsearch.test.cluster.util.Retry; import org.elasticsearch.test.cluster.util.Version; +import 
org.elasticsearch.test.cluster.util.resource.MutableResource; +import org.elasticsearch.test.cluster.util.resource.Resource; import java.io.BufferedInputStream; import java.io.BufferedReader; @@ -115,6 +117,9 @@ public static class Node { private Version currentVersion; private Process process = null; private DistributionDescriptor distributionDescriptor; + private Set extraConfigListeners = new HashSet<>(); + private Set keystoreFileListeners = new HashSet<>(); + private Set roleFileListeners = new HashSet<>(); public Node(Path baseWorkingDir, DistributionResolver distributionResolver, LocalNodeSpec spec) { this(baseWorkingDir, distributionResolver, spec, null, false); @@ -436,6 +441,10 @@ private void writeConfiguration() { private void copyExtraConfigFiles() { spec.getExtraConfigFiles().forEach((fileName, resource) -> { + if (fileName.equals("roles.yml")) { + throw new IllegalArgumentException("Security roles should be configured via 'rolesFile()' method."); + } + final Path target = configDir.resolve(fileName); final Path directory = target.getParent(); if (Files.exists(directory) == false) { @@ -446,6 +455,14 @@ private void copyExtraConfigFiles() { } } resource.writeTo(target); + + // Register and update listener for this config file + if (resource instanceof MutableResource && extraConfigListeners.add(fileName)) { + ((MutableResource) resource).addUpdateListener(updated -> { + LOGGER.info("Updating config file '{}'", fileName); + updated.writeTo(target); + }); + } }); } @@ -485,29 +502,39 @@ private void addKeystoreSettings() { private void addKeystoreFiles() { spec.getKeystoreFiles().forEach((key, file) -> { - try { - Path path = Files.createTempFile(tempDir, key, null); - file.writeTo(path); - - ProcessUtils.exec( - spec.getKeystorePassword(), - workingDir, - OS.conditional( - c -> c.onWindows(() -> distributionDir.resolve("bin").resolve("elasticsearch-keystore.bat")) - .onUnix(() -> distributionDir.resolve("bin").resolve("elasticsearch-keystore")) - ), - getEnvironmentVariables(), - false, - "add-file", - key, - path.toString() - ).waitFor(); - } catch (InterruptedException | IOException e) { - throw new RuntimeException(e); + addKeystoreFile(key, file); + if (file instanceof MutableResource && keystoreFileListeners.add(key)) { + ((MutableResource) file).addUpdateListener(updated -> { + LOGGER.info("Updating keystore file '{}'", key); + addKeystoreFile(key, updated); + }); } }); } + private void addKeystoreFile(String key, Resource file) { + try { + Path path = Files.createTempFile(tempDir, key, null); + file.writeTo(path); + + ProcessUtils.exec( + spec.getKeystorePassword(), + workingDir, + OS.conditional( + c -> c.onWindows(() -> distributionDir.resolve("bin").resolve("elasticsearch-keystore.bat")) + .onUnix(() -> distributionDir.resolve("bin").resolve("elasticsearch-keystore")) + ), + getEnvironmentVariables(), + false, + "add-file", + key, + path.toString() + ).waitFor(); + } catch (InterruptedException | IOException e) { + throw new RuntimeException(e); + } + } + private void writeSecureSecretsFile() { if (spec.getKeystoreFiles().isEmpty() == false) { throw new IllegalStateException( @@ -535,16 +562,20 @@ private void configureSecurity() { if (spec.isSecurityEnabled()) { if (spec.getUsers().isEmpty() == false) { LOGGER.info("Setting up roles.yml for node '{}'", name); - - Path destination = workingDir.resolve("config").resolve("roles.yml"); - spec.getRolesFiles().forEach(rolesFile -> { - try ( - Writer writer = Files.newBufferedWriter(destination, StandardOpenOption.APPEND); 
- Reader reader = new BufferedReader(new InputStreamReader(rolesFile.asStream())) - ) { - reader.transferTo(writer); - } catch (IOException e) { - throw new UncheckedIOException("Failed to append roles file " + rolesFile + " to " + destination, e); + writeRolesFile(); + spec.getRolesFiles().forEach(resource -> { + if (resource instanceof MutableResource && roleFileListeners.add(resource)) { + ((MutableResource) resource).addUpdateListener(updated -> { + LOGGER.info("Updating roles.yml for node '{}'", name); + Path rolesFile = workingDir.resolve("config").resolve("roles.yml"); + try { + Files.delete(rolesFile); + Files.copy(distributionDir.resolve("config").resolve("roles.yml"), rolesFile); + writeRolesFile(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); } }); } @@ -596,6 +627,20 @@ private void configureSecurity() { } } + private void writeRolesFile() { + Path destination = workingDir.resolve("config").resolve("roles.yml"); + spec.getRolesFiles().forEach(rolesFile -> { + try ( + Writer writer = Files.newBufferedWriter(destination, StandardOpenOption.APPEND); + Reader reader = new BufferedReader(new InputStreamReader(rolesFile.asStream())) + ) { + reader.transferTo(writer); + } catch (IOException e) { + throw new UncheckedIOException("Failed to append roles file " + rolesFile + " to " + destination, e); + } + }); + } + private void installPlugins() { if (spec.getPlugins().isEmpty() == false) { Pattern pattern = Pattern.compile("(.+)(?:-\\d+\\.\\d+\\.\\d+(-SNAPSHOT)?\\.zip)"); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/MutableResource.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/MutableResource.java new file mode 100644 index 0000000000000..477ad82e5944a --- /dev/null +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/MutableResource.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.test.cluster.util.resource; + +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +/** + * A mutable version of {@link Resource}. Anywhere a {@link Resource} is accepted in the test clusters API a {@link MutableResource} can + * be supplied instead. Unless otherwise specified, when the {@link #update(Resource)} method is called, the backing configuration will + * be updated in-place. + */ +public class MutableResource implements Resource { + private final List> listeners = new ArrayList<>(); + private Resource delegate; + + private MutableResource(Resource delegate) { + this.delegate = delegate; + } + + @Override + public InputStream asStream() { + return delegate.asStream(); + } + + public static MutableResource from(Resource delegate) { + return new MutableResource(delegate); + } + + public void update(Resource delegate) { + this.delegate = delegate; + this.listeners.forEach(listener -> listener.accept(this)); + } + + /** + * Registers a listener that will be notified when any updates are made to this resource. 
This listener will receive a reference to
+     * the resource with the updated value.
+     *
+     * @param listener action to be called on update
+     */
+    public synchronized void addUpdateListener(Consumer<Resource> listener) {
+        listeners.add(listener);
+    }
+}
diff --git a/x-pack/plugin/eql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSecurityTestCluster.java b/x-pack/plugin/eql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSecurityTestCluster.java
index a1a417d91aeb8..33f048d81ef52 100644
--- a/x-pack/plugin/eql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSecurityTestCluster.java
+++ b/x-pack/plugin/eql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/eql/EqlSecurityTestCluster.java
@@ -19,7 +19,7 @@ public static ElasticsearchCluster getCluster() {
         .setting("xpack.license.self_generated.type", "basic")
         .setting("xpack.monitoring.collection.enabled", "true")
         .setting("xpack.security.enabled", "true")
-        .configFile("roles.yml", Resource.fromClasspath("roles.yml"))
+        .rolesFile(Resource.fromClasspath("roles.yml"))
         .user("test-admin", "x-pack-test-password", "test-admin", false)
         .user("user1", "x-pack-test-password", "user1", false)
         .user("user2", "x-pack-test-password", "user2", false)

From becf069ddad97bd4091d75ad1c0342c8d489a7f5 Mon Sep 17 00:00:00 2001
From: Mark Tozzi
Date: Tue, 3 Dec 2024 16:13:46 -0500
Subject: [PATCH 384/386] Esql refactor date tests (#117923)

This refactors a bit of the type logic in the parameterized tests to pass the
input values as Java Instants for millisecond and nanosecond dates. This mainly
impacts verifier functions. The goal here is to ensure that the values are
correctly converted based on the type they were generated as, rather than
relying on the verifier function to know how to convert from a long with no
additional information. This will make tests that have mixed millisecond and
nanosecond inputs easier to write correctly.
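To make the effect of the getValue() change concrete, here is a minimal,
self-contained sketch of the conversion it performs. The TypedDataSketch class
and the simplified DataType enum are illustrative stand-ins: the real code
lives in TestCaseSupplier.TypedData and uses the ESQL DataType and DateUtils
classes, so read this as a hedged approximation rather than the shipped
implementation.

    import java.time.Instant;

    // Simplified stand-in for the ESQL DataType enum; only the relevant cases.
    enum DataType { DATETIME, DATE_NANOS, LONG }

    final class TypedDataSketch {
        // Convert the stored long into a typed value based on the type it was
        // generated as, instead of handing the verifier a unit-less long.
        static Object getValue(long data, DataType type) {
            if (type == DataType.DATETIME) {
                return Instant.ofEpochMilli(data); // millisecond-precision date
            }
            if (type == DataType.DATE_NANOS) {
                // nanoseconds since the epoch, equivalent to DateUtils.toInstant(long)
                // for the non-negative values used here
                return Instant.ofEpochSecond(data / 1_000_000_000L, data % 1_000_000_000L);
            }
            return data;
        }

        public static void main(String[] args) {
            Instant millis = (Instant) getValue(1_733_259_600_000L, DataType.DATETIME);
            Instant nanos = (Instant) getValue(1_733_259_600_000_000_000L, DataType.DATE_NANOS);
            // A verifier can now compare mixed-precision dates directly as Instants:
            System.out.println(millis.isBefore(nanos) || millis.equals(nanos)); // true: same instant
        }
    }

This is why the comparison verifiers in the diff below can be written as, for
example, (l, r) -> ((Instant) l).isBefore((Instant) r) instead of casting both
sides to a long and guessing the unit.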
--- .../xpack/esql/core/util/DateUtils.java | 4 + .../expression/function/TestCaseSupplier.java | 78 +++---------------- .../scalar/convert/ToDateNanosTests.java | 17 ++-- .../scalar/convert/ToDatetimeTests.java | 14 +++- .../scalar/convert/ToDoubleTests.java | 6 +- .../scalar/convert/ToIntegerTests.java | 7 +- .../function/scalar/convert/ToLongTests.java | 11 ++- .../scalar/convert/ToStringTests.java | 11 ++- .../scalar/convert/ToUnsignedLongTests.java | 6 +- .../operator/arithmetic/AddTests.java | 22 +++--- .../operator/arithmetic/SubTests.java | 16 ++-- .../comparison/GreaterThanOrEqualTests.java | 39 ++++------ .../operator/comparison/GreaterThanTests.java | 5 +- .../comparison/LessThanOrEqualTests.java | 39 ++++------ .../operator/comparison/LessThanTests.java | 5 +- 15 files changed, 117 insertions(+), 163 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/DateUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/DateUtils.java index 280cf172a8a58..20f7b400e9364 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/DateUtils.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/DateUtils.java @@ -174,6 +174,10 @@ public static ZonedDateTime asDateTime(long millis) { return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC); } + public static ZonedDateTime asDateTime(Instant instant) { + return ZonedDateTime.ofInstant(instant, UTC); + } + public static long asMillis(ZonedDateTime zonedDateTime) { return zonedDateTime.toInstant().toEpochMilli(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 816c9ef6f352c..377027b70fb54 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -620,70 +620,6 @@ public static void forUnaryBoolean( unary(suppliers, expectedEvaluatorToString, booleanCases(), expectedType, v -> expectedValue.apply((Boolean) v), warnings); } - /** - * Generate positive test cases for a unary function operating on an {@link DataType#DATETIME}. - * This variant defaults to maximum range of possible values - */ - public static void forUnaryDatetime( - List suppliers, - String expectedEvaluatorToString, - DataType expectedType, - Function expectedValue, - List warnings - ) { - unaryNumeric( - suppliers, - expectedEvaluatorToString, - dateCases(), - expectedType, - n -> expectedValue.apply(Instant.ofEpochMilli(n.longValue())), - warnings - ); - } - - /** - * Generate positive test cases for a unary function operating on an {@link DataType#DATETIME}. - * This variant accepts a range of values - */ - public static void forUnaryDatetime( - List suppliers, - String expectedEvaluatorToString, - DataType expectedType, - long min, - long max, - Function expectedValue, - List warnings - ) { - unaryNumeric( - suppliers, - expectedEvaluatorToString, - dateCases(min, max), - expectedType, - n -> expectedValue.apply(Instant.ofEpochMilli(n.longValue())), - warnings - ); - } - - /** - * Generate positive test cases for a unary function operating on an {@link DataType#DATE_NANOS}. 
- */ - public static void forUnaryDateNanos( - List suppliers, - String expectedEvaluatorToString, - DataType expectedType, - Function expectedValue, - List warnings - ) { - unaryNumeric( - suppliers, - expectedEvaluatorToString, - dateNanosCases(), - expectedType, - n -> expectedValue.apply(DateUtils.toInstant((long) n)), - warnings - ); - } - /** * Generate positive test cases for a unary function operating on an {@link DataType#GEO_POINT}. */ @@ -1912,11 +1848,19 @@ public List multiRowData() { } /** - * @return the data value being supplied, casting unsigned longs into BigIntegers correctly + * @return the data value being supplied, casting to java objects when appropriate */ public Object getValue() { - if (type == DataType.UNSIGNED_LONG && data instanceof Long l) { - return NumericUtils.unsignedLongAsBigInteger(l); + if (data instanceof Long l) { + if (type == DataType.UNSIGNED_LONG) { + return NumericUtils.unsignedLongAsBigInteger(l); + } + if (type == DataType.DATETIME) { + return Instant.ofEpochMilli(l); + } + if (type == DataType.DATE_NANOS) { + return DateUtils.toInstant(l); + } } return data; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java index 485073d1a91d2..7459abf29410d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -36,14 +37,20 @@ public static Iterable parameters() { final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryDateNanos(suppliers, read, DataType.DATE_NANOS, DateUtils::toLong, List.of()); - TestCaseSupplier.forUnaryDatetime( + TestCaseSupplier.unary( + suppliers, + read, + TestCaseSupplier.dateNanosCases(), + DataType.DATE_NANOS, + v -> DateUtils.toLong((Instant) v), + List.of() + ); + TestCaseSupplier.unary( suppliers, "ToDateNanosFromDatetimeEvaluator[field=" + read + "]", + TestCaseSupplier.dateCases(0, DateUtils.MAX_NANOSECOND_INSTANT.toEpochMilli()), DataType.DATE_NANOS, - 0, - DateUtils.MAX_NANOSECOND_INSTANT.toEpochMilli(), - i -> DateUtils.toNanoSeconds(i.toEpochMilli()), + i -> DateUtils.toNanoSeconds(((Instant) i).toEpochMilli()), List.of() ); TestCaseSupplier.forUnaryLong( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 2852b92ba156e..43b889baf5306 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -37,12 +37,20 @@ public static Iterable parameters() { final String read = "Attribute[channel=0]"; final List suppliers = new ArrayList<>(); - TestCaseSupplier.forUnaryDatetime(suppliers, read, DataType.DATETIME, Instant::toEpochMilli, emptyList()); 
- TestCaseSupplier.forUnaryDateNanos( + TestCaseSupplier.unary( + suppliers, + read, + TestCaseSupplier.dateCases(), + DataType.DATETIME, + v -> ((Instant) v).toEpochMilli(), + emptyList() + ); + TestCaseSupplier.unary( suppliers, "ToDatetimeFromDateNanosEvaluator[field=" + read + "]", + TestCaseSupplier.dateNanosCases(), DataType.DATETIME, - i -> DateUtils.toMilliSeconds(DateUtils.toLong(i)), + i -> DateUtils.toMilliSeconds(DateUtils.toLong((Instant) i)), emptyList() ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index d5153019c1e41..b68306d6cac80 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -49,11 +50,12 @@ public static Iterable parameters() { ); TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataType.DOUBLE, b -> b ? 1d : 0d, List.of()); - TestCaseSupplier.forUnaryDatetime( + TestCaseSupplier.unary( suppliers, evaluatorName.apply("Long"), + TestCaseSupplier.dateCases(), DataType.DOUBLE, - i -> (double) i.toEpochMilli(), + i -> (double) ((Instant) i).toEpochMilli(), List.of() ); // random strings that don't look like a double diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index eb81d48e0c5be..6a3f7022c9d3e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -48,7 +49,7 @@ public static Iterable parameters() { evaluatorName.apply("Long"), dateCases(0, Integer.MAX_VALUE), DataType.INTEGER, - l -> ((Long) l).intValue(), + l -> Long.valueOf(((Instant) l).toEpochMilli()).intValue(), List.of() ); // datetimes that fall outside Integer's range @@ -60,7 +61,9 @@ public static Iterable parameters() { l -> null, l -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [integer] range" + "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + + ((Instant) l).toEpochMilli() + + "] out of [integer] range" ) ); // random strings that don't look like an Integer diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index 4c2cf14af41e9..c7101ab730aba 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -43,8 +43,15 @@ public static Iterable parameters() { TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataType.LONG, b -> b ? 1L : 0L, List.of()); // datetimes - TestCaseSupplier.forUnaryDatetime(suppliers, read, DataType.LONG, Instant::toEpochMilli, List.of()); - TestCaseSupplier.forUnaryDateNanos(suppliers, read, DataType.LONG, DateUtils::toLong, List.of()); + TestCaseSupplier.unary(suppliers, read, TestCaseSupplier.dateCases(), DataType.LONG, v -> ((Instant) v).toEpochMilli(), List.of()); + TestCaseSupplier.unary( + suppliers, + read, + TestCaseSupplier.dateNanosCases(), + DataType.LONG, + v -> DateUtils.toLong((Instant) v), + List.of() + ); // random strings that don't look like a long TestCaseSupplier.forUnaryStrings( suppliers, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 0b101efa073d9..3b30e4b353ae5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -81,18 +82,20 @@ public static Iterable parameters() { b -> new BytesRef(b.toString()), List.of() ); - TestCaseSupplier.forUnaryDatetime( + TestCaseSupplier.unary( suppliers, "ToStringFromDatetimeEvaluator[field=" + read + "]", + TestCaseSupplier.dateCases(), DataType.KEYWORD, - i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i.toEpochMilli())), + i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(((Instant) i).toEpochMilli())), List.of() ); - TestCaseSupplier.forUnaryDateNanos( + TestCaseSupplier.unary( suppliers, "ToStringFromDateNanosEvaluator[field=" + read + "]", + TestCaseSupplier.dateNanosCases(), DataType.KEYWORD, - i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_NANOS_FORMATTER.formatNanos(DateUtils.toLong(i))), + i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_NANOS_FORMATTER.formatNanos(DateUtils.toLong((Instant) i))), List.of() ); TestCaseSupplier.forUnaryGeoPoint( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java index d8122aa73f81a..ca48bb029f223 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -19,6 +19,7 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -58,11 +59,12 @@ public static Iterable parameters() { ); // datetimes - TestCaseSupplier.forUnaryDatetime( + TestCaseSupplier.unary( suppliers, evaluatorName.apply("Long"), + TestCaseSupplier.dateCases(), DataType.UNSIGNED_LONG, - instant -> BigInteger.valueOf(instant.toEpochMilli()), + instant -> BigInteger.valueOf(((Instant) instant).toEpochMilli()), List.of() ); // random strings that don't look like an unsigned_long diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index abfb634d5f301..aa4c037e5e961 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -159,7 +159,7 @@ public static Iterable parameters() { }; BiFunction> warnings = (lhs, rhs) -> { try { - addDatesAndTemporalAmount(lhs.data(), rhs.data(), AddTests::addMillis); + addDatesAndTemporalAmount(lhs.getValue(), rhs.getValue(), AddTests::addMillis); return List.of(); } catch (ArithmeticException e) { return List.of( @@ -193,6 +193,7 @@ public static Iterable parameters() { BinaryOperator nanosResult = (lhs, rhs) -> { try { + assert (lhs instanceof Instant) || (rhs instanceof Instant); return addDatesAndTemporalAmount(lhs, rhs, AddTests::addNanos); } catch (ArithmeticException e) { return null; @@ -327,29 +328,28 @@ private static String addErrorMessageString(boolean includeOrdinal, List adder) { + private static Object addDatesAndTemporalAmount(Object lhs, Object rhs, ToLongBiFunction adder) { // this weird casting dance makes the expected value lambda symmetric - Long date; + Instant date; TemporalAmount period; - if (lhs instanceof Long) { - date = (Long) lhs; + assert (lhs instanceof Instant) || (rhs instanceof Instant); + if (lhs instanceof Instant) { + date = (Instant) lhs; period = (TemporalAmount) rhs; } else { - date = (Long) rhs; + date = (Instant) rhs; period = (TemporalAmount) lhs; } return adder.applyAsLong(date, period); } - private static long addMillis(Long date, TemporalAmount period) { + private static long addMillis(Instant date, TemporalAmount period) { return asMillis(asDateTime(date).plus(period)); } - private static long addNanos(Long date, TemporalAmount period) { + private static long addNanos(Instant date, TemporalAmount period) { return DateUtils.toLong( - Instant.from( - ZonedDateTime.ofInstant(DateUtils.toInstant(date), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).plus(period) - ) + Instant.from(ZonedDateTime.ofInstant(date, org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).plus(period)) ); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index 1338299b3a121..bce5dea30f849 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -277,25 +277,23 @@ protected Expression build(Source source, List args) { return new Sub(source, args.get(0), args.get(1)); } - private static Object subtractDatesAndTemporalAmount(Object lhs, Object rhs, ToLongBiFunction subtract) { + private static Object subtractDatesAndTemporalAmount(Object lhs, Object rhs, ToLongBiFunction subtract) { // this weird casting dance makes the expected value lambda symmetric - Long date; + Instant date; TemporalAmount period; - if (lhs instanceof Long) { - date = (Long) lhs; + if (lhs instanceof Instant) { + date = (Instant) lhs; period = (TemporalAmount) rhs; } else { - date = (Long) rhs; + date = (Instant) rhs; period = (TemporalAmount) lhs; } return subtract.applyAsLong(date, period); } - private static long subtractNanos(Long date, TemporalAmount period) { + private static long subtractNanos(Instant date, TemporalAmount period) { return DateUtils.toLong( - Instant.from( - ZonedDateTime.ofInstant(DateUtils.toInstant(date), org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).minus(period) - ) + Instant.from(ZonedDateTime.ofInstant(date, org.elasticsearch.xpack.esql.core.util.DateUtils.UTC).minus(period)) ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index a4f1a19e135ef..395a574028f6a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -106,33 +107,19 @@ public static Iterable parameters() { ) ); // Datetime - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "GreaterThanOrEqualLongsEvaluator", - "lhs", - "rhs", - (l, r) -> ((Number) l).longValue() >= ((Number) r).longValue(), - DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), - List.of(), - false - ) - ); + suppliers.addAll(TestCaseSupplier.forBinaryNotCasting("GreaterThanOrEqualLongsEvaluator", "lhs", "rhs", (lhs, rhs) -> { + if (lhs instanceof Instant l && rhs instanceof Instant r) { + return l.isAfter(r) || l.equals(r); + } + throw new UnsupportedOperationException("Got some weird types"); + }, DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), false)); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "GreaterThanOrEqualLongsEvaluator", - "lhs", - "rhs", - (l, r) -> ((Number) l).longValue() >= ((Number) r).longValue(), - DataType.BOOLEAN, - TestCaseSupplier.dateNanosCases(), - 
TestCaseSupplier.dateNanosCases(), - List.of(), - false - ) - ); + suppliers.addAll(TestCaseSupplier.forBinaryNotCasting("GreaterThanOrEqualLongsEvaluator", "lhs", "rhs", (lhs, rhs) -> { + if (lhs instanceof Instant l && rhs instanceof Instant r) { + return l.isAfter(r) || l.equals(r); + } + throw new UnsupportedOperationException("Got some weird types"); + }, DataType.BOOLEAN, TestCaseSupplier.dateNanosCases(), TestCaseSupplier.dateNanosCases(), List.of(), false)); suppliers.addAll( TestCaseSupplier.stringCases( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index 86a4676e35009..b56ecd7392ba6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -111,7 +112,7 @@ public static Iterable parameters() { "GreaterThanLongsEvaluator", "lhs", "rhs", - (l, r) -> ((Number) l).longValue() > ((Number) r).longValue(), + (l, r) -> ((Instant) l).isAfter((Instant) r), DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), @@ -125,7 +126,7 @@ public static Iterable parameters() { "GreaterThanLongsEvaluator", "lhs", "rhs", - (l, r) -> ((Number) l).longValue() > ((Number) r).longValue(), + (l, r) -> ((Instant) l).isAfter((Instant) r), DataType.BOOLEAN, TestCaseSupplier.dateNanosCases(), TestCaseSupplier.dateNanosCases(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 5793f26ecd447..60062f071c183 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -106,33 +107,19 @@ public static Iterable parameters() { ) ); // Datetime - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "LessThanOrEqualLongsEvaluator", - "lhs", - "rhs", - (l, r) -> ((Number) l).longValue() <= ((Number) r).longValue(), - DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), - List.of(), - false - ) - ); + suppliers.addAll(TestCaseSupplier.forBinaryNotCasting("LessThanOrEqualLongsEvaluator", "lhs", "rhs", (lhs, rhs) -> { + if (lhs instanceof Instant l && rhs instanceof Instant r) { + return l.isBefore(r) || l.equals(r); + } + throw new UnsupportedOperationException("Got some weird types"); + }, DataType.BOOLEAN, TestCaseSupplier.dateCases(), TestCaseSupplier.dateCases(), List.of(), false)); - suppliers.addAll( - 
-                "LessThanOrEqualLongsEvaluator",
-                "lhs",
-                "rhs",
-                (l, r) -> ((Number) l).longValue() <= ((Number) r).longValue(),
-                DataType.BOOLEAN,
-                TestCaseSupplier.dateNanosCases(),
-                TestCaseSupplier.dateNanosCases(),
-                List.of(),
-                false
-            )
-        );
+        suppliers.addAll(TestCaseSupplier.forBinaryNotCasting("LessThanOrEqualLongsEvaluator", "lhs", "rhs", (lhs, rhs) -> {
+            if (lhs instanceof Instant l && rhs instanceof Instant r) {
+                return l.isBefore(r) || l.equals(r);
+            }
+            throw new UnsupportedOperationException("Got some weird types");
+        }, DataType.BOOLEAN, TestCaseSupplier.dateNanosCases(), TestCaseSupplier.dateNanosCases(), List.of(), false));
 
         suppliers.addAll(
             TestCaseSupplier.stringCases(
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
index 0d114b4964920..30812cf8e538d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java
@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier;
 
 import java.math.BigInteger;
+import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.function.Supplier;
@@ -111,7 +112,7 @@ public static Iterable<Object[]> parameters() {
             "LessThanLongsEvaluator",
             "lhs",
             "rhs",
-            (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(),
+            (l, r) -> ((Instant) l).isBefore((Instant) r),
             DataType.BOOLEAN,
             TestCaseSupplier.dateNanosCases(),
             TestCaseSupplier.dateNanosCases(),
@@ -125,7 +126,7 @@
             "LessThanLongsEvaluator",
             "lhs",
             "rhs",
-            (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(),
+            (l, r) -> ((Instant) l).isBefore((Instant) r),
             DataType.BOOLEAN,
             TestCaseSupplier.dateCases(),
             TestCaseSupplier.dateCases(),

From bf418cbcf94b1cb1063d1eba3cf864f8eb02f81e Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 4 Dec 2024 08:14:31 +1100
Subject: [PATCH 385/386] Mute
 org.elasticsearch.packaging.test.ArchiveGenerateInitialCredentialsTests
 test20NoAutoGenerationWhenAutoConfigurationDisabled #117891

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 7e5e7f15700f3..1259da6257dc3 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -235,6 +235,9 @@ tests:
 - class: org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilderTests
   method: testToQuery
   issue: https://github.com/elastic/elasticsearch/issues/117904
+- class: org.elasticsearch.packaging.test.ArchiveGenerateInitialCredentialsTests
+  method: test20NoAutoGenerationWhenAutoConfigurationDisabled
+  issue: https://github.com/elastic/elasticsearch/issues/117891
 
 # Examples:
 #

From d582b82ae85f74150af374efa0f272378bb2d08a Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 4 Dec 2024 08:14:55 +1100
Subject: [PATCH 386/386] Mute org.elasticsearch.packaging.test.BootstrapCheckTests
 test20RunWithBootstrapChecks #117890

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 1259da6257dc3..ec09f948fc3ef 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml @@ -238,6 +238,9 @@ tests: - class: org.elasticsearch.packaging.test.ArchiveGenerateInitialCredentialsTests method: test20NoAutoGenerationWhenAutoConfigurationDisabled issue: https://github.com/elastic/elasticsearch/issues/117891 +- class: org.elasticsearch.packaging.test.BootstrapCheckTests + method: test20RunWithBootstrapChecks + issue: https://github.com/elastic/elasticsearch/issues/117890 # Examples: #
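
Note on the ESQL comparison hunks above: each one swaps an epoch-long expectation, ((Number) l).longValue() compared with ((Number) r).longValue(), for an expectation computed on java.time.Instant values supplied by TestCaseSupplier.dateCases() and dateNanosCases(). The sketch below isolates that Instant-based predicate pattern so it can be read outside the diff context; the class and method names here are illustrative, not part of the patch, and only the Instant calls are taken from the hunks above.

    import java.time.Instant;

    final class InstantPredicates {
        // ">=" as written in the GreaterThanOrEqualTests lambdas:
        // strict isAfter, with equals covering the boundary case
        static boolean gte(Instant l, Instant r) {
            return l.isAfter(r) || l.equals(r);
        }

        // "<=" as written in the LessThanOrEqualTests lambdas
        static boolean lte(Instant l, Instant r) {
            return l.isBefore(r) || l.equals(r);
        }

        public static void main(String[] args) {
            Instant now = Instant.now();
            // equal instants satisfy both non-strict comparisons
            System.out.println(gte(now, now) && lte(now, now)); // prints: true
        }
    }

Comparing Instant values instead of raw longs keeps millisecond- and nanosecond-resolution test cases behind one comparison API, which is why the same lambda shape serves both dateCases() and dateNanosCases() in these tests.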